4855f1d2231ff1a44e7bfab59d2a5abd4256370f
[WebKit.git] / Source / JavaScriptCore / jit / JITArithmetic.cpp
1 /*
2  * Copyright (C) 2008, 2015-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #include "JIT.h"
30
31 #include "CodeBlock.h"
32 #include "JITAddGenerator.h"
33 #include "JITBitAndGenerator.h"
34 #include "JITBitOrGenerator.h"
35 #include "JITBitXorGenerator.h"
36 #include "JITDivGenerator.h"
37 #include "JITInlines.h"
38 #include "JITLeftShiftGenerator.h"
39 #include "JITMulGenerator.h"
40 #include "JITNegGenerator.h"
41 #include "JITOperations.h"
42 #include "JITRightShiftGenerator.h"
43 #include "JITSubGenerator.h"
44 #include "JSArray.h"
45 #include "JSFunction.h"
46 #include "Interpreter.h"
47 #include "JSCInlines.h"
48 #include "ResultType.h"
49 #include "SlowPathCall.h"
50
51
52 namespace JSC {
53
54 void JIT::emit_op_jless(Instruction* currentInstruction)
55 {
56     int op1 = currentInstruction[1].u.operand;
57     int op2 = currentInstruction[2].u.operand;
58     unsigned target = currentInstruction[3].u.operand;
59
60     emit_compareAndJump(op_jless, op1, op2, target, LessThan);
61 }
62
63 void JIT::emit_op_jlesseq(Instruction* currentInstruction)
64 {
65     int op1 = currentInstruction[1].u.operand;
66     int op2 = currentInstruction[2].u.operand;
67     unsigned target = currentInstruction[3].u.operand;
68
69     emit_compareAndJump(op_jlesseq, op1, op2, target, LessThanOrEqual);
70 }
71
72 void JIT::emit_op_jgreater(Instruction* currentInstruction)
73 {
74     int op1 = currentInstruction[1].u.operand;
75     int op2 = currentInstruction[2].u.operand;
76     unsigned target = currentInstruction[3].u.operand;
77
78     emit_compareAndJump(op_jgreater, op1, op2, target, GreaterThan);
79 }
80
81 void JIT::emit_op_jgreatereq(Instruction* currentInstruction)
82 {
83     int op1 = currentInstruction[1].u.operand;
84     int op2 = currentInstruction[2].u.operand;
85     unsigned target = currentInstruction[3].u.operand;
86
87     emit_compareAndJump(op_jgreatereq, op1, op2, target, GreaterThanOrEqual);
88 }
89
90 void JIT::emit_op_jnless(Instruction* currentInstruction)
91 {
92     int op1 = currentInstruction[1].u.operand;
93     int op2 = currentInstruction[2].u.operand;
94     unsigned target = currentInstruction[3].u.operand;
95
96     emit_compareAndJump(op_jnless, op1, op2, target, GreaterThanOrEqual);
97 }
98
99 void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
100 {
101     int op1 = currentInstruction[1].u.operand;
102     int op2 = currentInstruction[2].u.operand;
103     unsigned target = currentInstruction[3].u.operand;
104
105     emit_compareAndJump(op_jnlesseq, op1, op2, target, GreaterThan);
106 }
107
108 void JIT::emit_op_jngreater(Instruction* currentInstruction)
109 {
110     int op1 = currentInstruction[1].u.operand;
111     int op2 = currentInstruction[2].u.operand;
112     unsigned target = currentInstruction[3].u.operand;
113
114     emit_compareAndJump(op_jngreater, op1, op2, target, LessThanOrEqual);
115 }
116
117 void JIT::emit_op_jngreatereq(Instruction* currentInstruction)
118 {
119     int op1 = currentInstruction[1].u.operand;
120     int op2 = currentInstruction[2].u.operand;
121     unsigned target = currentInstruction[3].u.operand;
122
123     emit_compareAndJump(op_jngreatereq, op1, op2, target, LessThan);
124 }
125
126 void JIT::emitSlow_op_jless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
127 {
128     int op1 = currentInstruction[1].u.operand;
129     int op2 = currentInstruction[2].u.operand;
130     unsigned target = currentInstruction[3].u.operand;
131
132     emit_compareAndJumpSlow(op1, op2, target, DoubleLessThan, operationCompareLess, false, iter);
133 }
134
135 void JIT::emitSlow_op_jlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
136 {
137     int op1 = currentInstruction[1].u.operand;
138     int op2 = currentInstruction[2].u.operand;
139     unsigned target = currentInstruction[3].u.operand;
140
141     emit_compareAndJumpSlow(op1, op2, target, DoubleLessThanOrEqual, operationCompareLessEq, false, iter);
142 }
143
144 void JIT::emitSlow_op_jgreater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
145 {
146     int op1 = currentInstruction[1].u.operand;
147     int op2 = currentInstruction[2].u.operand;
148     unsigned target = currentInstruction[3].u.operand;
149
150     emit_compareAndJumpSlow(op1, op2, target, DoubleGreaterThan, operationCompareGreater, false, iter);
151 }
152
153 void JIT::emitSlow_op_jgreatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
154 {
155     int op1 = currentInstruction[1].u.operand;
156     int op2 = currentInstruction[2].u.operand;
157     unsigned target = currentInstruction[3].u.operand;
158
159     emit_compareAndJumpSlow(op1, op2, target, DoubleGreaterThanOrEqual, operationCompareGreaterEq, false, iter);
160 }
161
162 void JIT::emitSlow_op_jnless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
163 {
164     int op1 = currentInstruction[1].u.operand;
165     int op2 = currentInstruction[2].u.operand;
166     unsigned target = currentInstruction[3].u.operand;
167
168     emit_compareAndJumpSlow(op1, op2, target, DoubleGreaterThanOrEqualOrUnordered, operationCompareLess, true, iter);
169 }
170
171 void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
172 {
173     int op1 = currentInstruction[1].u.operand;
174     int op2 = currentInstruction[2].u.operand;
175     unsigned target = currentInstruction[3].u.operand;
176
177     emit_compareAndJumpSlow(op1, op2, target, DoubleGreaterThanOrUnordered, operationCompareLessEq, true, iter);
178 }
179
180 void JIT::emitSlow_op_jngreater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
181 {
182     int op1 = currentInstruction[1].u.operand;
183     int op2 = currentInstruction[2].u.operand;
184     unsigned target = currentInstruction[3].u.operand;
185
186     emit_compareAndJumpSlow(op1, op2, target, DoubleLessThanOrEqualOrUnordered, operationCompareGreater, true, iter);
187 }
188
189 void JIT::emitSlow_op_jngreatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
190 {
191     int op1 = currentInstruction[1].u.operand;
192     int op2 = currentInstruction[2].u.operand;
193     unsigned target = currentInstruction[3].u.operand;
194
195     emit_compareAndJumpSlow(op1, op2, target, DoubleLessThanOrUnordered, operationCompareGreaterEq, true, iter);
196 }
197
198 #if USE(JSVALUE64)
199
void JIT::emit_op_unsigned(Instruction* currentInstruction)
{
    // op_unsigned fast path: re-tags op1 as an int32 JSValue, bailing to the
    // slow path if the value is not an int32 or is negative (two slow cases,
    // linked in the same order by emitSlow_op_unsigned).
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    
    emitGetVirtualRegister(op1, regT0);
    emitJumpSlowCaseIfNotInt(regT0);
    addSlowCase(branch32(LessThan, regT0, TrustedImm32(0)));
    emitTagInt(regT0, regT0);
    emitPutVirtualRegister(result, regT0);
}
211
void JIT::emitSlow_op_unsigned(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // emit_op_unsigned registered two slow cases (not-int and negative value);
    // link both, then fall back to the generic slow path.
    linkSlowCase(iter);
    linkSlowCase(iter);
    
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_unsigned);
    slowPathCall.call();
}
220
void JIT::emit_compareAndJump(OpcodeID, int op1, int op2, unsigned target, RelationalCondition condition)
{
    // Shared fast path for the compare-and-branch opcodes (JSVALUE64).
    // We generate inline code for the following cases in the fast path:
    // - int immediate to constant int immediate
    // - constant int immediate to int immediate
    // - int immediate to int immediate

    if (isOperandConstantChar(op1)) {
        // Constant single-character string on the left: load the first character
        // of the right operand's string and compare against the constant.
        emitGetVirtualRegister(op2, regT0);
        addSlowCase(emitJumpIfNotJSCell(regT0));
        JumpList failures;
        emitLoadCharacterString(regT0, regT0, failures);
        addSlowCase(failures);
        // commute() swaps the condition because the constant ends up on the
        // right of the branch while it is the left operand of the compare.
        addJump(branch32(commute(condition), regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
        return;
    }
    if (isOperandConstantChar(op2)) {
        // Mirror case: constant single-character string on the right.
        emitGetVirtualRegister(op1, regT0);
        addSlowCase(emitJumpIfNotJSCell(regT0));
        JumpList failures;
        emitLoadCharacterString(regT0, regT0, failures);
        addSlowCase(failures);
        addJump(branch32(condition, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
        return;
    }
    if (isOperandConstantInt(op2)) {
        // int vs. constant int: only op1 needs a type check.
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotInt(regT0);
        int32_t op2imm = getOperandConstantInt(op2);
        addJump(branch32(condition, regT0, Imm32(op2imm)), target);
    } else if (isOperandConstantInt(op1)) {
        // constant int vs. int: op2 is loaded into regT1; commute() the
        // condition since the constant (the left operand) is compared second.
        emitGetVirtualRegister(op2, regT1);
        emitJumpSlowCaseIfNotInt(regT1);
        int32_t op1imm = getOperandConstantInt(op1);
        addJump(branch32(commute(condition), regT1, Imm32(op1imm)), target);
    } else {
        // General int vs. int: both operands get type checks.
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotInt(regT0);
        emitJumpSlowCaseIfNotInt(regT1);

        addJump(branch32(condition, regT0, regT1), target);
    }
}
264
void JIT::emit_compareAndJumpSlow(int op1, int op2, unsigned target, DoubleCondition condition, size_t (JIT_OPERATION *operation)(ExecState*, EncodedJSValue, EncodedJSValue), bool invert, Vector<SlowCaseEntry>::iterator& iter)
{
    // Shared slow path for all eight compare-and-branch opcodes. The
    // emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jless)) fall-throughs assume
    // every such opcode has the same length, asserted here:
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jlesseq), OPCODE_LENGTH_op_jlesseq_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jnless), OPCODE_LENGTH_op_jnless_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jnlesseq), OPCODE_LENGTH_op_jnlesseq_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jgreater), OPCODE_LENGTH_op_jgreater_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jgreatereq), OPCODE_LENGTH_op_jgreatereq_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jngreater), OPCODE_LENGTH_op_jngreater_equals_op_jless);
    COMPILE_ASSERT(OPCODE_LENGTH(op_jless) == OPCODE_LENGTH(op_jngreatereq), OPCODE_LENGTH_op_jngreatereq_equals_op_jless);
    
    // We generate inline code for the following cases in the slow path:
    // - floating-point number to constant int immediate
    // - constant int immediate to floating-point number
    // - floating-point number to floating-point number.
    if (isOperandConstantChar(op1) || isOperandConstantChar(op2)) {
        // Link the slow cases registered by the constant-char fast path
        // (not-a-cell plus the character-load failures; presumably four
        // entries to match emit_compareAndJump — confirm against that code),
        // then defer entirely to the C++ comparison operation.
        linkSlowCase(iter);
        linkSlowCase(iter);
        linkSlowCase(iter);
        linkSlowCase(iter);

        emitGetVirtualRegister(op1, argumentGPR0);
        emitGetVirtualRegister(op2, argumentGPR1);
        callOperation(operation, argumentGPR0, argumentGPR1);
        // `invert` is true for the jn* opcodes, which branch when the
        // comparison operation returns false.
        emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, returnValueGPR), target);
        return;
    }

    if (isOperandConstantInt(op2)) {
        linkSlowCase(iter);

        if (supportsFloatingPoint()) {
            // op1 was already loaded into regT0 by the fast path. If it is a
            // double, unbox it and compare against the constant as doubles.
            Jump fail1 = emitJumpIfNotNumber(regT0);
            add64(tagTypeNumberRegister, regT0);
            move64ToDouble(regT0, fpRegT0);

            int32_t op2imm = getConstantOperand(op2).asInt32();

            move(Imm32(op2imm), regT1);
            convertInt32ToDouble(regT1, fpRegT1);

            emitJumpSlowToHot(branchDouble(condition, fpRegT0, fpRegT1), target);

            // Not taken: skip back to just past the compare opcode.
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jless));

            fail1.link(this);
        }

        // Not a number (or no FP support): call out to C++.
        emitGetVirtualRegister(op2, regT1);
        callOperation(operation, regT0, regT1);
        emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, returnValueGPR), target);
    } else if (isOperandConstantInt(op1)) {
        linkSlowCase(iter);

        if (supportsFloatingPoint()) {
            // Mirror case: op2 is in regT1; unbox it and compare against the
            // constant op1 converted to double.
            Jump fail1 = emitJumpIfNotNumber(regT1);
            add64(tagTypeNumberRegister, regT1);
            move64ToDouble(regT1, fpRegT1);

            int32_t op1imm = getConstantOperand(op1).asInt32();

            move(Imm32(op1imm), regT0);
            convertInt32ToDouble(regT0, fpRegT0);

            emitJumpSlowToHot(branchDouble(condition, fpRegT0, fpRegT1), target);

            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jless));

            fail1.link(this);
        }

        // regT1 still holds op2; reload op1 into regT2 for the call.
        emitGetVirtualRegister(op1, regT2);
        callOperation(operation, regT2, regT1);
        emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, returnValueGPR), target);
    } else {
        // General case: the fast path registered two slow cases (one per
        // operand); the first is linked here, the second before the call below.
        linkSlowCase(iter);

        if (supportsFloatingPoint()) {
            Jump fail1 = emitJumpIfNotNumber(regT0);
            Jump fail2 = emitJumpIfNotNumber(regT1);
            // regT1 being an int means this path was reached via regT0 failing
            // the int check; the int/double mix is handled by the C++ call.
            Jump fail3 = emitJumpIfInt(regT1);
            add64(tagTypeNumberRegister, regT0);
            add64(tagTypeNumberRegister, regT1);
            move64ToDouble(regT0, fpRegT0);
            move64ToDouble(regT1, fpRegT1);

            emitJumpSlowToHot(branchDouble(condition, fpRegT0, fpRegT1), target);

            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jless));

            fail1.link(this);
            fail2.link(this);
            fail3.link(this);
        }

        linkSlowCase(iter);
        callOperation(operation, regT0, regT1);
        emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, returnValueGPR), target);
    }
}
364
void JIT::emit_op_inc(Instruction* currentInstruction)
{
    // op_inc fast path: increments srcDst in place. Two slow cases: the
    // operand is not an int32, or the add overflows INT32_MAX.
    int srcDst = currentInstruction[1].u.operand;

    emitGetVirtualRegister(srcDst, regT0);
    emitJumpSlowCaseIfNotInt(regT0);
    addSlowCase(branchAdd32(Overflow, TrustedImm32(1), regT0));
    emitTagInt(regT0, regT0);
    emitPutVirtualRegister(srcDst);
}
375
void JIT::emitSlow_op_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link the two slow cases from emit_op_inc (not-int, overflow), then call
    // the generic increment slow path.
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_inc);
    slowPathCall.call();
}
383
void JIT::emit_op_dec(Instruction* currentInstruction)
{
    // op_dec fast path: decrements srcDst in place. Two slow cases: the
    // operand is not an int32, or the subtract overflows INT32_MIN.
    int srcDst = currentInstruction[1].u.operand;

    emitGetVirtualRegister(srcDst, regT0);
    emitJumpSlowCaseIfNotInt(regT0);
    addSlowCase(branchSub32(Overflow, TrustedImm32(1), regT0));
    emitTagInt(regT0, regT0);
    emitPutVirtualRegister(srcDst);
}
394
void JIT::emitSlow_op_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link the two slow cases from emit_op_dec (not-int, overflow), then call
    // the generic decrement slow path.
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_dec);
    slowPathCall.call();
}
402
403 /* ------------------------------ BEGIN: OP_MOD ------------------------------ */
404
405 #if CPU(X86) || CPU(X86_64)
406
void JIT::emit_op_mod(Instruction* currentInstruction)
{
    // op_mod fast path using the x86 IDIV instruction (eax / operand, with the
    // remainder delivered in edx). Registers five slow cases in total.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    // Make sure registers are correct for x86 IDIV instructions.
    ASSERT(regT0 == X86Registers::eax);
    auto edx = X86Registers::edx;
    auto ecx = X86Registers::ecx;
    ASSERT(regT4 != edx);
    ASSERT(regT4 != ecx);

    emitGetVirtualRegisters(op1, regT4, op2, ecx);
    emitJumpSlowCaseIfNotInt(regT4);
    emitJumpSlowCaseIfNotInt(ecx);

    // Keep the original numerator in regT4 (needed for the sign check below);
    // IDIV consumes eax (regT0).
    move(regT4, regT0);
    // Division by zero bails to the slow path.
    addSlowCase(branchTest32(Zero, ecx));
    // INT_MIN % -1 would overflow the IDIV quotient; bail to the slow path.
    Jump denominatorNotNeg1 = branch32(NotEqual, ecx, TrustedImm32(-1));
    addSlowCase(branch32(Equal, regT0, TrustedImm32(-2147483647-1)));
    denominatorNotNeg1.link(this);
    x86ConvertToDoubleWord32();
    x86Div32(ecx);
    // A zero remainder with a negative numerator should be -0, which cannot be
    // represented as an int32, so that case also bails to the slow path.
    Jump numeratorPositive = branch32(GreaterThanOrEqual, regT4, TrustedImm32(0));
    addSlowCase(branchTest32(Zero, edx));
    numeratorPositive.link(this);
    emitTagInt(edx, regT0);
    emitPutVirtualRegister(result);
}
437
void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link the five slow cases registered by emit_op_mod (two not-int checks,
    // zero denominator, INT_MIN numerator with -1 denominator, and the
    // negative-zero remainder case), then call the generic mod slow path.
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_mod);
    slowPathCall.call();
}
448
449 #else // CPU(X86) || CPU(X86_64)
450
void JIT::emit_op_mod(Instruction* currentInstruction)
{
    // No inline integer-mod fast path on this architecture: always call the
    // generic slow path.
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_mod);
    slowPathCall.call();
}
456
void JIT::emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&)
{
    // The non-x86 emit_op_mod registers no slow cases, so this must be dead.
    UNREACHABLE_FOR_PLATFORM();
}
461
462 #endif // CPU(X86) || CPU(X86_64)
463
464 /* ------------------------------ END: OP_MOD ------------------------------ */
465
466 #endif // USE(JSVALUE64)
467
void JIT::emit_op_negate(Instruction* currentInstruction)
{
    // op_negate: arithmetic negation via the shared JITNegGenerator snippet.
    int result = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    // Register assignments differ between the 64-bit (one GPR per JSValue)
    // and 32-bit (tag/payload pair) value representations.
#if USE(JSVALUE64)
    JSValueRegs srcRegs = JSValueRegs(regT0);
    JSValueRegs resultRegs = srcRegs;
    GPRReg scratchGPR = regT2;
#else
    JSValueRegs srcRegs = JSValueRegs(regT1, regT0);
    JSValueRegs resultRegs = srcRegs;
    GPRReg scratchGPR = regT4;
#endif

    emitGetVirtualRegister(src, srcRegs);

    JITNegGenerator gen(resultRegs, srcRegs, scratchGPR);
    gen.generateFastPath(*this);

    // The negate snippet is expected to always emit a fast path.
    ASSERT(gen.didEmitFastPath());
    gen.endJumpList().link(this);
    emitPutVirtualRegister(result, resultRegs);

    addSlowCase(gen.slowPathJumpList());
}
494
void JIT::emitSlow_op_negate(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the negate snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_negate);
    slowPathCall.call();
}
502
template<typename SnippetGenerator>
void JIT::emitBitBinaryOpFastPath(Instruction* currentInstruction)
{
    // Shared fast path for the bitwise binary opcodes (and/or/xor/lshift):
    // decodes operands, sets up the platform register assignment, and lets the
    // per-op SnippetGenerator emit the actual code.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT2;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
#endif

    SnippetOperand leftOperand;
    SnippetOperand rightOperand;

    // At most one side may be folded into the snippet as a constant.
    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    RELEASE_ASSERT(!leftOperand.isConst() || !rightOperand.isConst());

    if (!leftOperand.isConst())
        emitGetVirtualRegister(op1, leftRegs);
    if (!rightOperand.isConst())
        emitGetVirtualRegister(op2, rightRegs);

    SnippetGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs, scratchGPR);

    gen.generateFastPath(*this);

    // These snippets are expected to always emit a fast path.
    ASSERT(gen.didEmitFastPath());
    gen.endJumpList().link(this);
    emitPutVirtualRegister(result, resultRegs);

    addSlowCase(gen.slowPathJumpList());
}
547
void JIT::emit_op_bitand(Instruction* currentInstruction)
{
    // Fast path shared with the other bitwise ops; see emitBitBinaryOpFastPath.
    emitBitBinaryOpFastPath<JITBitAndGenerator>(currentInstruction);
}
552
void JIT::emitSlow_op_bitand(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the bitand snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitand);
    slowPathCall.call();
}
560
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    // Fast path shared with the other bitwise ops; see emitBitBinaryOpFastPath.
    emitBitBinaryOpFastPath<JITBitOrGenerator>(currentInstruction);
}
565
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the bitor snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}
573
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    // Fast path shared with the other bitwise ops; see emitBitBinaryOpFastPath.
    emitBitBinaryOpFastPath<JITBitXorGenerator>(currentInstruction);
}
578
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the bitxor snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}
586
void JIT::emit_op_lshift(Instruction* currentInstruction)
{
    // Left shift shares the generic bit-op fast path; see emitBitBinaryOpFastPath.
    emitBitBinaryOpFastPath<JITLeftShiftGenerator>(currentInstruction);
}
591
void JIT::emitSlow_op_lshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the lshift snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_lshift);
    slowPathCall.call();
}
599
void JIT::emitRightShiftFastPath(Instruction* currentInstruction, OpcodeID opcodeID)
{
    // Shared fast path for op_rshift (signed) and op_urshift (unsigned); the
    // opcode only selects the snippet's shift type.
    ASSERT(opcodeID == op_rshift || opcodeID == op_urshift);

    JITRightShiftGenerator::ShiftType snippetShiftType = opcodeID == op_rshift ?
        JITRightShiftGenerator::SignedShift : JITRightShiftGenerator::UnsignedShift;

    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT2;
    FPRReg scratchFPR = InvalidFPRReg;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
    FPRReg scratchFPR = fpRegT2;
#endif

    SnippetOperand leftOperand;
    SnippetOperand rightOperand;

    // At most one side may be folded into the snippet as a constant.
    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    RELEASE_ASSERT(!leftOperand.isConst() || !rightOperand.isConst());

    if (!leftOperand.isConst())
        emitGetVirtualRegister(op1, leftRegs);
    if (!rightOperand.isConst())
        emitGetVirtualRegister(op2, rightRegs);

    JITRightShiftGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs,
        fpRegT0, scratchGPR, scratchFPR, snippetShiftType);

    gen.generateFastPath(*this);

    // The right-shift snippet is expected to always emit a fast path.
    ASSERT(gen.didEmitFastPath());
    gen.endJumpList().link(this);
    emitPutVirtualRegister(result, resultRegs);

    addSlowCase(gen.slowPathJumpList());
}
651
void JIT::emit_op_rshift(Instruction* currentInstruction)
{
    // Signed right shift; shares emitRightShiftFastPath with op_urshift.
    emitRightShiftFastPath(currentInstruction, op_rshift);
}
656
void JIT::emitSlow_op_rshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the rshift snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_rshift);
    slowPathCall.call();
}
664
void JIT::emit_op_urshift(Instruction* currentInstruction)
{
    // Unsigned right shift; shares emitRightShiftFastPath with op_rshift.
    emitRightShiftFastPath(currentInstruction, op_urshift);
}
669
void JIT::emitSlow_op_urshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link every slow case the urshift snippet registered for this bytecode,
    // then call the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_urshift);
    slowPathCall.call();
}
677
void JIT::emit_op_add(Instruction* currentInstruction)
{
    // op_add fast path via JITAddGenerator. Operand types recorded by the
    // bytecode generator (slot 4) let the snippet specialize its code.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT2;
    FPRReg scratchFPR = InvalidFPRReg;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
    FPRReg scratchFPR = fpRegT2;
#endif

    // When profiling, results feed the ResultProfile for this bytecode offset.
    ResultProfile* resultProfile = nullptr;
    if (shouldEmitProfiling())
        resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);

    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());

    // At most one side may be folded into the snippet as a constant.
    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    RELEASE_ASSERT(!leftOperand.isConst() || !rightOperand.isConst());

    if (!leftOperand.isConst())
        emitGetVirtualRegister(op1, leftRegs);
    if (!rightOperand.isConst())
        emitGetVirtualRegister(op2, rightRegs);

    JITAddGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs,
        fpRegT0, fpRegT1, scratchGPR, scratchFPR, resultProfile);

    gen.generateFastPath(*this);

    if (gen.didEmitFastPath()) {
        gen.endJumpList().link(this);
        emitPutVirtualRegister(result, resultRegs);
        
        addSlowCase(gen.slowPathJumpList());
    } else {
        // No fast path was emitted; go straight to a C++ call. Constant
        // operands were never loaded above, so load them now for the call.
        ASSERT(gen.endJumpList().empty());
        ASSERT(gen.slowPathJumpList().empty());
        if (resultProfile) {
            if (leftOperand.isConst())
                emitGetVirtualRegister(op1, leftRegs);
            if (rightOperand.isConst())
                emitGetVirtualRegister(op2, rightRegs);
            callOperation(operationValueAddProfiled, resultRegs, leftRegs, rightRegs, resultProfile);
            emitPutVirtualRegister(result, resultRegs);
        } else {
            JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_add);
            slowPathCall.call();
        }
    }
}
744
void JIT::emitSlow_op_add(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path for op_add: link everything the snippet registered, then call
    // either the profiled add operation or the generic slow path.
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    // Must mirror the register assignment used by emit_op_add: non-constant
    // operands are still live in these registers from the fast path.
#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
#endif
    
    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());

    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    if (shouldEmitProfiling()) {
        // Constant operands were folded into the snippet and never loaded into
        // registers on the fast path, so load them for the call.
        if (leftOperand.isConst())
            emitGetVirtualRegister(op1, leftRegs);
        if (rightOperand.isConst())
            emitGetVirtualRegister(op2, rightRegs);
        ResultProfile* resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);
        callOperation(operationValueAddProfiled, resultRegs, leftRegs, rightRegs, resultProfile);
        emitPutVirtualRegister(result, resultRegs);
    } else {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_add);
        slowPathCall.call();
    }
}
785
void JIT::emit_op_div(Instruction* currentInstruction)
{
    // op_div fast path via JITDivGenerator. Unlike the bit ops, constant
    // doubles can also be folded in on JSVALUE64.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT2;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
#endif
    FPRReg scratchFPR = fpRegT2;

    // When profiling, results feed the ResultProfile for this bytecode offset.
    ResultProfile* resultProfile = nullptr;
    if (shouldEmitProfiling())
        resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);

    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());

    // At most one side may be folded in as a constant (int, or double on
    // JSVALUE64).
    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
#if USE(JSVALUE64)
    else if (isOperandConstantDouble(op1))
        leftOperand.setConstDouble(getOperandConstantDouble(op1));
#endif
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));
#if USE(JSVALUE64)
    else if (isOperandConstantDouble(op2))
        rightOperand.setConstDouble(getOperandConstantDouble(op2));
#endif

    RELEASE_ASSERT(!leftOperand.isConst() || !rightOperand.isConst());

    if (!leftOperand.isConst())
        emitGetVirtualRegister(op1, leftRegs);
    if (!rightOperand.isConst())
        emitGetVirtualRegister(op2, rightRegs);

    JITDivGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs,
        fpRegT0, fpRegT1, scratchGPR, scratchFPR, resultProfile);

    gen.generateFastPath(*this);

    if (gen.didEmitFastPath()) {
        gen.endJumpList().link(this);
        emitPutVirtualRegister(result, resultRegs);

        addSlowCase(gen.slowPathJumpList());
    } else {
        // No fast path was emitted; defer entirely to the generic slow path.
        ASSERT(gen.endJumpList().empty());
        ASSERT(gen.slowPathJumpList().empty());
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_div);
        slowPathCall.call();
    }
}
850
// Slow path for op_div: link every bailout recorded for this bytecode offset,
// then redo the whole operation through the common C++ slow path.
void JIT::emitSlow_op_div(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_div);
    slowPathCall.call();
}
858
// Emits the baseline fast path for op_mul: result = op1 * op2.
void JIT::emit_op_mul(Instruction* currentInstruction)
{
    // Bytecode operands: dst, lhs, rhs, plus the statically known operand types.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

#if USE(JSVALUE64)
    // 64-bit: unlike op_div, the result gets its own register (regT2), leaving
    // the operand registers intact for the profiled fallback below.
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = JSValueRegs(regT2);
    GPRReg scratchGPR = regT3;
    FPRReg scratchFPR = InvalidFPRReg;
#else
    // 32-bit: each JSValue occupies a register pair; result reuses the lhs pair.
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
    FPRReg scratchFPR = fpRegT2;
#endif

    // Only allocate a result profile when profiling is on; the generator
    // tolerates a null profile pointer.
    ResultProfile* resultProfile = nullptr;
    if (shouldEmitProfiling())
        resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);

    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());

    // Fold at most one int32 constant operand into the snippet (else-if keeps
    // the other operand non-const).
    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    // The snippet generator requires that both operands are never const.
    RELEASE_ASSERT(!leftOperand.isConst() || !rightOperand.isConst());

    // Only positive const-int32 operands are left out of registers here —
    // presumably so the generator can treat them as immediates without
    // negative/zero complications (the generator decides; confirm there).
    if (!leftOperand.isPositiveConstInt32())
        emitGetVirtualRegister(op1, leftRegs);
    if (!rightOperand.isPositiveConstInt32())
        emitGetVirtualRegister(op2, rightRegs);

    JITMulGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs,
        fpRegT0, fpRegT1, scratchGPR, scratchFPR, resultProfile);

    gen.generateFastPath(*this);

    if (gen.didEmitFastPath()) {
        // Fast path emitted: store the result, and queue the bailout jumps so
        // emitSlow_op_mul can link them to the slow path.
        gen.endJumpList().link(this);
        emitPutVirtualRegister(result, resultRegs);

        addSlowCase(gen.slowPathJumpList());
    } else {
        // No fast path emitted. There must be no dangling jumps here.
        ASSERT(gen.endJumpList().empty());
        ASSERT(gen.slowPathJumpList().empty());
        if (resultProfile) {
            // Profiling build: call the profiled C helper directly. Any
            // operand skipped above (positive const int32) must be loaded
            // into registers first, since the helper takes both in registers.
            if (leftOperand.isPositiveConstInt32())
                emitGetVirtualRegister(op1, leftRegs);
            if (rightOperand.isPositiveConstInt32())
                emitGetVirtualRegister(op2, rightRegs);
            callOperation(operationValueMulProfiled, resultRegs, leftRegs, rightRegs, resultProfile);
            emitPutVirtualRegister(result, resultRegs);
        } else {
            JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_mul);
            slowPathCall.call();
        }
    }
}
925
// Slow path for op_mul: link the fast-path bailouts, then either call the
// profiled multiply helper (keeping the ResultProfile up to date) or the
// generic C++ slow path.
void JIT::emitSlow_op_mul(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);
    
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    // Must mirror the register assignment in emit_op_mul so that operands
    // loaded by the fast path are found where it left them.
#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
#endif

    // Re-derive which operand (if any) was treated as a constant, exactly as
    // emit_op_mul did.
    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());

    if (isOperandConstantInt(op1))
        leftOperand.setConstInt32(getOperandConstantInt(op1));
    else if (isOperandConstantInt(op2))
        rightOperand.setConstInt32(getOperandConstantInt(op2));

    if (shouldEmitProfiling()) {
        // A positive const-int32 operand was never loaded by the fast path
        // (emit_op_mul skips it), so load it now for the profiled helper.
        if (leftOperand.isPositiveConstInt32())
            emitGetVirtualRegister(op1, leftRegs);
        if (rightOperand.isPositiveConstInt32())
            emitGetVirtualRegister(op2, rightRegs);
        ResultProfile* resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);
        callOperation(operationValueMulProfiled, resultRegs, leftRegs, rightRegs, resultProfile);
        emitPutVirtualRegister(result, resultRegs);
    } else {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_mul);
        slowPathCall.call();
    }
}
966
// Emits the baseline fast path for op_sub: result = op1 - op2.
// Unlike add/mul/div, no constant folding is attempted: both operands are
// always loaded into registers.
void JIT::emit_op_sub(Instruction* currentInstruction)
{
    // Bytecode operands: dst, lhs, rhs, plus the statically known operand types.
    int result = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

#if USE(JSVALUE64)
    // 64-bit: one GPR per JSValue; the result overwrites the lhs register.
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT2;
    FPRReg scratchFPR = InvalidFPRReg;
#else
    // 32-bit: each JSValue occupies a register pair.
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
    GPRReg scratchGPR = regT4;
    FPRReg scratchFPR = fpRegT2;
#endif

    // Only allocate a result profile when profiling is on; the generator
    // tolerates a null profile pointer.
    ResultProfile* resultProfile = nullptr;
    if (shouldEmitProfiling())
        resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);

    SnippetOperand leftOperand(types.first());
    SnippetOperand rightOperand(types.second());
    
    emitGetVirtualRegister(op1, leftRegs);
    emitGetVirtualRegister(op2, rightRegs);

    JITSubGenerator gen(leftOperand, rightOperand, resultRegs, leftRegs, rightRegs,
        fpRegT0, fpRegT1, scratchGPR, scratchFPR, resultProfile);

    gen.generateFastPath(*this);

    // The sub generator is expected to always emit a fast path, so there is
    // no inline slow-path fallback here.
    ASSERT(gen.didEmitFastPath());
    gen.endJumpList().link(this);
    emitPutVirtualRegister(result, resultRegs);

    // Queue the bailout jumps for emitSlow_op_sub to link.
    addSlowCase(gen.slowPathJumpList());
}
1009
// Slow path for op_sub: link the fast-path bailouts, then either call the
// profiled subtract helper or the generic C++ slow path.
void JIT::emitSlow_op_sub(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);

    int result = currentInstruction[1].u.operand;
    // Must mirror emit_op_sub's register assignment: no operand loads happen
    // here, so the profiled call below relies on leftRegs/rightRegs still
    // holding the values the fast path loaded before bailing out.
#if USE(JSVALUE64)
    JSValueRegs leftRegs = JSValueRegs(regT0);
    JSValueRegs rightRegs = JSValueRegs(regT1);
    JSValueRegs resultRegs = leftRegs;
#else
    JSValueRegs leftRegs = JSValueRegs(regT1, regT0);
    JSValueRegs rightRegs = JSValueRegs(regT3, regT2);
    JSValueRegs resultRegs = leftRegs;
#endif

    if (shouldEmitProfiling()) {
        ResultProfile* resultProfile = m_codeBlock->ensureResultProfile(m_bytecodeOffset);
        callOperation(operationValueSubProfiled, resultRegs, leftRegs, rightRegs, resultProfile);
        emitPutVirtualRegister(result, resultRegs);
    } else {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_sub);
        slowPathCall.call();
    }
}
1034
// op_pow has no baseline fast path: always call the C++ slow path.
void JIT::emit_op_pow(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_pow);
    slowPathCall.call();
}
1040
/* ------------------------------ END: OP_ADD, OP_SUB, OP_MUL, OP_DIV, OP_POW ------------------------------ */
1042
1043 } // namespace JSC
1044
1045 #endif // ENABLE(JIT)