/*
 * Copyright (C) 2008, 2012, 2013, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

#include "JSCInlines.h"

namespace JSC {

#if USE(JSVALUE64)
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleTo64(fpRegT0, regT0);
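    // Box the double: under the JSVALUE64 encoding, the raw double bits are turned into an
    // EncodedJSValue by subtracting the TagTypeNumber constant held in tagTypeNumberRegister.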
    sub64(tagTypeNumberRegister, regT0);
    return slowCases;
}
#else
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleToInts(fpRegT0, regT0, regT1);
    return slowCases;
}
#endif // USE(JSVALUE64)

ALWAYS_INLINE MacroAssembler::JumpList JIT::emitLoadForArrayMode(Instruction* currentInstruction, JITArrayMode arrayMode, PatchableJump& badType)
{
    switch (arrayMode) {
    case JITInt32:
        return emitInt32Load(currentInstruction, badType);
    case JITDouble:
        return emitDoubleLoad(currentInstruction, badType);
    case JITContiguous:
        return emitContiguousLoad(currentInstruction, badType);
    case JITArrayStorage:
        return emitArrayStorageLoad(currentInstruction, badType);
    default:
        break;
    }
    RELEASE_ASSERT_NOT_REACHED();
    return MacroAssembler::JumpList();
}

inline MacroAssembler::JumpList JIT::emitContiguousGetByVal(Instruction* instruction, PatchableJump& badType, IndexingType expectedShape)
{
    return emitContiguousLoad(instruction, badType, expectedShape);
}

inline MacroAssembler::JumpList JIT::emitArrayStorageGetByVal(Instruction* instruction, PatchableJump& badType)
{
    return emitArrayStorageLoad(instruction, badType);
}

ALWAYS_INLINE bool JIT::isOperandConstantDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

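// Loads the single character of a one-character JSString held in src into dst, branching to
// failures if src is not a string, its length is not 1, or its StringImpl is null (an
// unresolved rope). Clobbers regT1.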
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

ALWAYS_INLINE JIT::Call JIT::emitNakedTailCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearTailCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallSiteIndex(instruction).bits();
#else
    uint32_t locationBits = CallSiteIndex(m_bytecodeOffset + 1).bits();
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}

#if OS(WINDOWS) && CPU(X86_64)
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

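// The callOperation overloads below marshal arguments and call into the C++ slow-path operations.
// The operation typedef names (defined in JITOperations.h) encode the signature: the leading
// letters give the return type (J = EncodedJSValue, C = JSCell*, P = pointer, S = size_t,
// V = void, Z = int32_t), 'E' is the ExecState* argument, and the remaining letters describe
// the operands (J = EncodedJSValue, Z = int32_t, C = JSCell*, Pc = bytecode PC, and so on).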
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJsc operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJscZ operation, GPRReg arg1, int32_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, TrustedImmPtr arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscC operation, int dst, GPRReg arg1, JSCell* cell)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscCJ operation, int dst, GPRReg arg1, JSCell* cell, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZZ operation, int dst, int32_t arg1, int32_t arg2)
{
    setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImm32(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    updateTopCallFrame();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}


#if USE(JSVALUE64)
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJBy operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3, ByValInfo* byValInfo)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3, TrustedImmPtr(byValInfo));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, GPRReg arg)
{
    setupArgumentsWithExecState(TrustedImm32(dst), arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1, GPRReg arg2, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJBy operation, int dst, GPRReg arg1, GPRReg arg2, ByValInfo* byValInfo)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(byValInfo));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int op1, SymbolTable* symbolTable, RegisterID regOp3)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZSymtabJ operation, int op1, SymbolTable* symbolTable, RegisterID regOp3)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdZJ operation, RegisterID regOp1, const Identifier* identOp2, int32_t op3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), TrustedImm32(op3), regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdZJJ operation, RegisterID regOp1, const Identifier* identOp2, int32_t op3, RegisterID regOp4, RegisterID regOp5)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), TrustedImm32(op3), regOp4, regOp5);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJZJ operation, RegisterID regOp1, RegisterID regOp2, int32_t op3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, regOp2, TrustedImm32(op3), regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

#else // USE(JSVALUE32_64)

// An EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When compiled for ARM EABI, it must be aligned to an even-numbered register pair (r0, r2) or to [sp].
// To keep the generated code from using the wrong registers, occupy r1 or r3 with a dummy argument when necessary.
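// For illustration: callOperation(J_JITOperation_EJ, dst, arg1Tag, arg1Payload) below expands its
// arguments to setupArgumentsWithExecState(TrustedImm32(0), arg1Payload, arg1Tag), so the ExecState*
// takes r0, the dummy fills r1, and the 64-bit EncodedJSValue lands in the aligned r2/r3 pair.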
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// On the SH4 architecture, a 64-bit EncodedJSValue cannot be passed half in an argument register and half on the stack.
// To avoid this, occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
// is no other 32-bit value argument behind this 64-bit JSValue.
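// For illustration: in callOperation(S_JITOperation_EJJ, ...) below, the dummy fills r7 so that the
// second EncodedJSValue is passed entirely on the stack rather than split between r7 and the stack.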
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJBy operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload, ByValInfo* byValInfo)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag, TrustedImmPtr(byValInfo));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECIZC operation, RegisterID regOp1, const Identifier* identOp2, int32_t op3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), TrustedImm32(op3), regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECIZCC operation, RegisterID regOp1, const Identifier* identOp2, int32_t op3, RegisterID regOp4, RegisterID regOp5)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), TrustedImm32(op3), regOp4, regOp5);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECJZC operation, RegisterID arg1, RegisterID arg2Tag, RegisterID arg2Payload, int32_t arg3, RegisterID arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int32_t op1, SymbolTable* symbolTable, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJBy operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload, ByValInfo* byValInfo)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag, TrustedImmPtr(byValInfo));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(TrustedImm32(dst), regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscCJ operation, int dst, GPRReg arg1, JSCell* cell, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell), EABI_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::linkAllSlowCasesForBytecodeOffset(Vector<SlowCaseEntry>& slowCases, Vector<SlowCaseEntry>::iterator& iter, unsigned bytecodeOffset)
{
    while (iter != slowCases.end() && iter->to == bytecodeOffset) {
        iter->from.link(this);
        ++iter;
    }
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellObject(RegisterID cellReg)
{
    return branch8(AboveOrEqual, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantChar(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}

inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif

    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}

inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}

inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}

inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}

#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        return;
    }

    load32(tagFor(index), tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        return;
    }

    load32(payloadFor(index), payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, JSValueRegs dst)
{
    emitLoad(src, dst.tagGPR(), dst.payloadGPR());
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, JSValueRegs from)
{
    emitStore(dst, from.tagGPR(), from.payloadGPR());
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantInt(int op1, int op2, int& op, int32_t& constant)
{
    if (isOperandConstantInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

// emitGetVirtualRegister loads a virtual register (a stack-frame slot or a constant) into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, JSValueRegs dst)
{
    emitGetVirtualRegister(src, dst.payloadGPR());
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}

ALWAYS_INLINE int32_t JIT::getOperandConstantInt(int src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, JSValueRegs from)
{
    emitPutVirtualRegister(dst, from.payloadGPR());
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfInt(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotInt(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::PatchableJump JIT::emitPatchableJumpIfNotInt(RegisterID reg)
{
    return patchableBranch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotInt(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotInt(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotInt(RegisterID reg)
{
    addSlowCase(emitJumpIfNotInt(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotInt(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotInt(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotNumber(reg));
}

ALWAYS_INLINE void JIT::emitTagBool(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h