54603b6476ce3495e4cd300f45a87ed5b3f058c4
[WebKit-https.git] / Source / JavaScriptCore / jit / JITInlines.h
1 /*
2  * Copyright (C) 2008, 2012, 2013, 2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #ifndef JITInlines_h
27 #define JITInlines_h
28
29 #if ENABLE(JIT)
30
31 #include "JSCInlines.h"
32
33 namespace JSC {
34
#if USE(JSVALUE64)
// get_by_val fast path for double-typed array storage: loads the double
// element, then re-boxes it as an EncodedJSValue in regT0 by moving the raw
// bits into a GPR and subtracting tagTypeNumberRegister (the inverse of the
// 64-bit number-boxing addition). Slow cases from the load are propagated.
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleTo64(fpRegT0, regT0);
    sub64(tagTypeNumberRegister, regT0);
    return slowCases;
}
#else
// 32-bit variant: the loaded double is split into the regT0/regT1 pair via
// moveDoubleToInts; no extra re-boxing arithmetic is required.
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleToInts(fpRegT0, regT0, regT1);
    return slowCases;
}
#endif // USE(JSVALUE64)
51
52 ALWAYS_INLINE MacroAssembler::JumpList JIT::emitLoadForArrayMode(Instruction* currentInstruction, JITArrayMode arrayMode, PatchableJump& badType)
53 {
54     switch (arrayMode) {
55     case JITInt32:
56         return emitInt32Load(currentInstruction, badType);
57     case JITDouble:
58         return emitDoubleLoad(currentInstruction, badType);
59     case JITContiguous:
60         return emitContiguousLoad(currentInstruction, badType);
61     case JITArrayStorage:
62         return emitArrayStorageLoad(currentInstruction, badType);
63     default:
64         break;
65     }
66     RELEASE_ASSERT_NOT_REACHED();
67     return MacroAssembler::JumpList();
68 }
69
// get_by_val fast paths for contiguous and ArrayStorage-backed arrays: the
// element loaded is already a boxed JSValue, so the load helpers do all of
// the work and no re-boxing step is needed (contrast emitDoubleGetByVal).
inline MacroAssembler::JumpList JIT::emitContiguousGetByVal(Instruction* instruction, PatchableJump& badType, IndexingType expectedShape)
{
    return emitContiguousLoad(instruction, badType, expectedShape);
}

inline MacroAssembler::JumpList JIT::emitArrayStorageGetByVal(Instruction* instruction, PatchableJump& badType)
{
    return emitArrayStorageLoad(instruction, badType);
}
79
// Returns true if virtual register |src| refers to a compile-time constant
// that holds a double value.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

// Fetches the constant JSValue for a constant virtual register index.
// |src| must be a constant register index (asserted).
ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
90
// Stores a raw 32-bit integer from |from| into a CallFrame header slot. On
// 32-bit value representations the tag half is explicitly set to Int32Tag;
// on 64-bit the register is stored as one 64-bit word.
ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}
100
// Loads the sole character of a single-character JSString cell in |src|
// into |dst|. Appends to |failures| when |src| is not a string cell, its
// length is not exactly 1, or its StringImpl pointer is null (unresolved
// value). Clobbers regT1 (used to hold the StringImpl flags for the
// 8-bit vs. 16-bit character-width branch).
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    // Branch on the is-8-bit flag to pick the character load width.
    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}
119
// Emits a near call and records it (with the current bytecode offset and
// the eventual target) in m_calls so the link phase can bind it later.
// Must only run during hot/cold path generation, when m_bytecodeOffset
// is valid (asserted).
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
127
// Publishes the current frame as vm->topCallFrame and stamps the tag half
// of the ArgumentCount header slot with an encoded return location — a
// bytecode instruction pointer on 32-bit, a bytecode offset on 64-bit —
// so stack walkers can attribute this frame. Both encodings are biased
// by +1; presumably to match CallFrame::Location's encoding scheme —
// confirm against that class before changing.
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeInstruction(instruction);
#else
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(m_bytecodeOffset + 1);
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}
140
// Standard operation-call sequence: publish the top call frame, emit the
// call, then emit an exception check covering the call's result.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}

#if OS(WINDOWS) && CPU(X86_64)
// Win64 variant for operations whose return type is passed via the
// slow-path-return-type convention (sret-style marshalling).
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif
158
// Like appendCallWithExceptionCheck, but on exception the check unwinds
// with a call-frame rollback instead of the normal exception path.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}

// Calls the operation with an exception check, then stores the returned
// EncodedJSValue into virtual register |dst| (one register on 64-bit,
// tag/payload pair on 32-bit).
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

// Same as above, but also records the result at the current value
// profiling site before storing it to |dst|.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}
189
// callOperation overloads, C_* signatures (operations returning a cell or
// similar, result unused here): each overload marshals its arguments with
// the ExecState as the implicit first argument, emits the call, and
// appends an exception check. Overloads differ only in operation
// signature and argument kinds.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJsc operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJscZ operation, GPRReg arg1, int32_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, TrustedImmPtr arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}
237
// callOperation overloads, J_* signatures (operations returning an
// EncodedJSValue): the result is stored into virtual register |dst|, with
// value profiling for the WithProfileTag variants. The interleaved
// V_JITOperation_EC overload discards its result. All check for a pending
// exception after the call.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscC operation, int dst, GPRReg arg1, JSCell* cell)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZZ operation, int dst, int32_t arg1, int32_t arg2)
{
    setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImm32(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}
297
// callOperation overloads, S_*/Sprt_* signatures (boolean-ish results left
// in the return register). The Sprt_ variant needs the Win64
// slow-path-return-type calling convention.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}
320
// callOperation overloads, V_* signatures (void-returning operations), plus
// the WithCallFrameRollbackOnException family used where an exception must
// unwind the frame being set up rather than the current one.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    // NOTE(review): appendCallWithExceptionCheck() already calls
    // updateTopCallFrame(); this extra call looks redundant — confirm
    // before removing.
    updateTopCallFrame();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}
375
376
#if USE(JSVALUE64)
// 64-bit callOperation overloads: each JSValue argument travels in a single
// GPR, so marshalling is a direct pass-through of the registers/immediates
// in operation-signature order.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, GPRReg arg)
{
    setupArgumentsWithExecState(TrustedImm32(dst), arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

// No exception check: used where the caller handles (or cannot raise)
// exceptions itself.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int op1, SymbolTable* symbolTable, RegisterID regOp3)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}
504
#else // USE(JSVALUE32_64)

// 32-bit callOperation overloads: each JSValue argument is a tag/payload
// GPR pair, passed payload-first. The dummy-argument macros below keep
// 64-bit arguments correctly aligned in argument registers on ABIs that
// require it; the argument order in each setupArgumentsWithExecState call
// is therefore ABI-critical — do not reorder.

// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned even-numbered register (r0, r2 or [sp]).
// To avoid assemblies from using wrong registers, let's occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// JSVALUE32_64 is a 64-bit integer that cannot be put half in an argument register and half on stack when using SH4 architecture.
// To avoid this, let's occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
// is no other 32-bit value argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECIC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int32_t op1, SymbolTable* symbolTable, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(TrustedImm32(dst), regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)
655
// Emits a branch taken when the cell in |reg| does NOT have |structure|.
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}

// Consumes the next slow-case entry only when |vReg| might hold a
// non-cell; when the code block proves it is always a cell, no not-cell
// slow path was emitted and there is nothing to link.
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}
666
// Records |jump| as a slow-path entry for the bytecode op currently being
// compiled, to be linked when the slow path is generated.
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}
673
674 ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
675 {
676     ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
677
678     const JumpList::JumpVector& jumpVector = jumpList.jumps();
679     size_t size = jumpVector.size();
680     for (size_t i = 0; i < size; ++i)
681         m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
682 }
683
// Records a placeholder (unlinked) slow-case entry for the current
// bytecode op — used when a slow path exists but has no hot-path jump
// into it.
ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    
    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}
691
// Queues |jump| to be linked to the label of the bytecode op at
// m_bytecodeOffset + relativeOffset during the link phase.
ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

// Links |jump| immediately to the already-emitted hot-path label at the
// target bytecode offset (used from slow paths jumping back to hot code).
ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}
705
// Branches when the cell's type-info type is >= ObjectType, i.e. the cell is
// an object. |cellReg| must already be known to hold a cell pointer.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellObject(RegisterID cellReg)
{
    return branch8(AboveOrEqual, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}
710
// Branches when the cell's type-info type is < ObjectType, i.e. the cell is
// not an object. |cellReg| must already be known to hold a cell pointer.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}
715
716 #if ENABLE(SAMPLING_FLAGS)
// Emits code to set bit (flag - 1) in the global sampling-flags word.
// |flag| is 1-based, so valid values are 1..32.
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
723
// Emits code to clear bit (flag - 1) in the global sampling-flags word.
// |flag| is 1-based, so valid values are 1..32.
ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
730 #endif
731
732 #if ENABLE(SAMPLING_COUNTERS)
// Emits code to add |count| to the sampling counter's 64-bit slot in memory.
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
737 #endif
738
739 #if ENABLE(OPCODE_SAMPLING)
740 #if CPU(X86_64)
// Emits code to record the encoded (instruction, inHostFunction) sample into
// the sampler's slot. The slot address is materialized in ecx first —
// presumably because a 64-bit absolute address cannot be used directly as a
// store destination on x86-64; confirm against MacroAssemblerX86_64.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
746 #else
// Emits code to record the encoded (instruction, inHostFunction) sample
// directly into the sampler's slot (non-x86-64 variant).
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
751 #endif
752 #endif
753
754 #if ENABLE(CODEBLOCK_SAMPLING)
755 #if CPU(X86_64)
// Emits code to record |codeBlock| into the sampler's code-block slot. The
// slot address goes through ecx first — presumably because a 64-bit absolute
// address cannot be a direct store destination on x86-64; confirm against
// MacroAssemblerX86_64.
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
761 #else
// Emits code to record |codeBlock| directly into the sampler's code-block
// slot (non-x86-64 variant).
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
766 #endif
767 #endif
768
769 ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(int src)
770 {
771     return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
772 }
773
// Emits the inline (fast-path) allocation of a JSObject: pops the head of the
// allocator's free list into |result|, taking the slow path when the list is
// empty, then initializes the butterfly pointer and structure. |scratch| is
// clobbered.
template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result)); // Null head means the free list is exhausted.

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}
790
// Emits a store of the value currently held in regT0 (plus the regT1 tag on
// 32-bit) into the profile's single bucket. Callers must have checked
// shouldEmitProfiling() already.
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    
    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    // Store payload and tag halves separately into the bucket's descriptor.
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}
811
812 inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
813 {
814     if (!shouldEmitProfiling())
815         return;
816     emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
817 }
818
// Convenience overload: profiles at the bytecode offset currently being
// compiled.
inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}
823
// When profiling, records the cell's structure ID into the array profile's
// last-seen-structure slot. Always finishes by loading the cell's indexing
// type into |indexingType| for the caller's array-shape dispatch; the
// register is used as a scratch for the structure ID first.
inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}
833
// Convenience wrapper: looks up (or creates) the array profile for
// |bytecodeIndex| and profiles the cell against it.
inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}
838
// Emits a store that flags the profile as having seen a store to a hole.
inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}
843
// Emits a store that flags the profile as having seen an out-of-bounds access.
inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}
848
// Thin readability wrapper over arrayModesInclude: did the profile observe
// arrays with the given indexing capability?
static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}
853
// Picks the specialized array access mode from the profile's observed shapes,
// under the CodeBlock's concurrent-JIT lock. Priority order matters: Double,
// then Int32, then ArrayStorage, defaulting to Contiguous.
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}
867
868 #if USE(JSVALUE32_64)
869
870 inline void JIT::emitLoadTag(int index, RegisterID tag)
871 {
872     if (m_codeBlock->isConstantRegisterIndex(index)) {
873         move(Imm32(getConstantOperand(index).tag()), tag);
874         return;
875     }
876
877     load32(tagFor(index), tag);
878 }
879
880 inline void JIT::emitLoadPayload(int index, RegisterID payload)
881 {
882     if (m_codeBlock->isConstantRegisterIndex(index)) {
883         move(Imm32(getConstantOperand(index).payload()), payload);
884         return;
885     }
886
887     load32(payloadFor(index), payload);
888 }
889
// Materializes the constant JSValue |v| into a (tag, payload) register pair.
inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}
895
// Loads the (tag, payload) halves of virtual register |index| relative to
// |base|. The load order is chosen so that when |payload| aliases |base| the
// base register is not clobbered before both reads have happened.
inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        // callFrameRegister is never one of the destination registers here, so
        // the simple helpers are safe.
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}
916
// Loads two virtual registers into two (tag, payload) register pairs.
// index2 is loaded first; NOTE(review): presumably so index1's registers, if
// needed as sources, are written last — confirm against callers.
inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}
922
// Loads the double in virtual register |index| into FP register |value|,
// reading from the constant pool for constant registers and from the stack
// slot otherwise.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}
931
// Converts the int32 in virtual register |index| to a double in |value|.
// For constant registers, reads the payload half of the boxed constant
// directly out of the constant pool via its byte offset.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}
941
// Stores a (tag, payload) register pair into virtual register |index|
// relative to |base|; payload first, then tag.
inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}
947
// Stores an int32 payload into virtual register |index|; the Int32 tag is
// written only when the slot is not already known to hold an int32.
inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
954
// Immediate-payload overload: stores a constant int32 payload; the Int32 tag
// is written only when the slot is not already known to hold an int32.
inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
961
// Stores a cell-pointer payload into virtual register |index|; the Cell tag
// is written only when the slot is not already known to hold a cell.
inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}
968
// Stores a boolean payload into virtual register |index|; the Boolean tag is
// written only when the slot is not already known to hold a boolean.
inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}
975
// Stores FP register |value| into the stack slot for virtual register |index|.
inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}
980
// Stores the constant JSValue into virtual register |index| relative to
// |base|, writing payload then tag as immediates.
inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}
986
// Initializes virtual register |dst| to undefined (32_64 variant).
ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}
991
992 inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
993 {
994     if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
995         if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
996             addSlowCase(jump());
997         else
998             addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
999     }
1000 }
1001
// Same as the one-argument overload, but the operand's tag is already in
// |tag|, so the runtime check compares that register against CellTag instead
// of re-loading from the stack.
inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}
1011
// Returns true when operand |src| is a constant register holding an int32.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}
1016
1017 ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant)
1018 {
1019     if (isOperandConstantImmediateInt(op1)) {
1020         constant = getConstantOperand(op1).asInt32();
1021         op = op2;
1022         return true;
1023     }
1024
1025     if (isOperandConstantImmediateInt(op2)) {
1026         constant = getConstantOperand(op2).asInt32();
1027         op = op1;
1028         return true;
1029     }
1030     
1031     return false;
1032 }
1033
1034 #else // USE(JSVALUE32_64)
1035
1036 // get arg puts an arg from the SF register array into a h/w register
// Loads virtual register |src| into |dst|. Constant registers are
// materialized as immediates: non-numbers via TrustedImm64, numbers via Imm64
// — NOTE(review): presumably so numeric constants are eligible for value
// blinding; confirm against the MacroAssembler Imm64/TrustedImm64 contract.
// get arg puts an arg from the SF register array into a h/w register
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}
1053
// VirtualRegister convenience overload: delegates using the raw offset.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}
1058
// Loads two virtual registers into two destination registers, src1 first.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}
1064
// VirtualRegister convenience overload: delegates using the raw offsets.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}
1069
// Returns the int32 value of constant operand |src|; the caller must have
// verified isOperandConstantImmediateInt(src) first.
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(int src)
{
    return getConstantOperand(src).asInt32();
}
1074
// Returns true when operand |src| is a constant register holding an int32
// (JSVALUE64 variant; identical logic to the 32_64 one).
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}
1079
// Stores |from| into the stack slot for virtual register |dst|.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}
1084
// VirtualRegister convenience overload: delegates using the raw offset.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}
1089
// Initializes virtual register |dst| to the encoded undefined value
// (JSVALUE64 variant: a single 64-bit store).
ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
1094
// Branches when |reg| holds a cell: in the JSVALUE64 encoding a value is a
// cell iff none of the tag-mask bits are set.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}
1099
// Branches when both values are cells: ORs them into |scratch| — the union
// has no tag-mask bits set only if neither operand did.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}
1106
// Adds a slow case taken when |reg| holds a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}
1111
// Adds a slow case taken when |reg| does not hold a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}
1116
// Adds the not-a-cell slow case only when virtual register |vReg| is not
// statically known to be a non-immediate (matches linkSlowCaseIfNotJSCell).
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}
1122
// Loads the double in virtual register |index| into FP register |value|
// (JSVALUE64 variant): from the constant pool for constant registers,
// otherwise from the stack slot.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}
1131
// Converts the int32 in virtual register |index| to a double in |value|
// (JSVALUE64 variant). Constant operands must be int32 constants and are
// materialized as immediates.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
1140
// Branches when |reg| holds an immediate int32: in the JSVALUE64 encoding,
// int32 values compare unsigned >= the TagTypeNumber constant.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}
1145
// Branches when |reg| does not hold an immediate int32 (the inverse of
// emitJumpIfImmediateInteger).
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}
1150
// Branches unless both values are immediate int32s: ANDs them into |scratch|
// — the result keeps all TagTypeNumber bits only if both operands had them.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}
1157
// Adds a slow case taken when |reg| is not an immediate int32.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}
1162
// Adds a slow case taken unless both values are immediate int32s; |scratch|
// is clobbered.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}
1167
// Adds a slow case taken when |reg| is not an immediate number.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}
1172
// Alias for emitFastArithIntToImmNoCheck, kept for readability at call sites
// that conceptually re-tag an already-immediate value.
ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}
1177
// Turns the 0/1 in |reg| into an encoded boolean JSValue by ORing in the
// ValueFalse bits (ValueFalse | 1 == ValueTrue in the JSVALUE64 encoding).
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}
1182
1183 #endif // USE(JSVALUE32_64)
1184
// Compares a structure: on JSVALUE64 the 32-bit structure ID is compared; on
// 32-bit builds the full structure pointer is compared instead.
template <typename T>
JIT::Jump JIT::branchStructure(RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return branch32(condition, leftHandSide, TrustedImm32(structure->id()));
#else
    return branchPtr(condition, leftHandSide, TrustedImmPtr(structure));
#endif
}
1194
// Free-function variant of JIT::branchStructure for use with an arbitrary
// MacroAssembler; same ID-vs-pointer comparison split per value encoding.
template <typename T>
MacroAssembler::Jump branchStructure(MacroAssembler& jit, MacroAssembler::RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return jit.branch32(condition, leftHandSide, MacroAssembler::TrustedImm32(structure->id()));
#else
    return jit.branchPtr(condition, leftHandSide, MacroAssembler::TrustedImmPtr(structure));
#endif
}
1204
1205 } // namespace JSC
1206
1207 #endif // ENABLE(JIT)
1208
1209 #endif // JITInlines_h
1210