2 * Copyright (C) 2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 #ifndef WASMFunctionCompiler_h
27 #define WASMFunctionCompiler_h
29 #if ENABLE(WEBASSEMBLY)
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
34 #include "JITOperations.h"
35 #include "LinkBuffer.h"
36 #include "MaxFrameExtentForSlowPathCall.h"
// Slow-path C helper: coerces an arbitrary JSValue argument to int32 via
// JSValue::toInt32 (may run arbitrary JS through valueOf/toString and can
// set an exception on the ExecState).
// NOTE(review): this listing is missing lines (function braces elided).
42 static int32_t JIT_OPERATION operationConvertJSValueToInt32(ExecState* exec, EncodedJSValue value)
44 return JSValue::decode(value).toInt32(exec);
// Slow-path C helper: coerces an arbitrary JSValue argument to double via
// JSValue::toNumber (may run arbitrary JS and can set an exception).
// NOTE(review): this listing is missing lines (function braces elided).
47 static double JIT_OPERATION operationConvertJSValueToDouble(ExecState* exec, EncodedJSValue value)
49 return JSValue::decode(value).toNumber(exec);
52 #if !CPU(X86) && !CPU(X86_64)
// C-call fallbacks for 32-bit division/modulo on targets where buildBinaryI32
// does not emit an inline idiv sequence (i.e. everything except x86/x86_64).
// Bodies are not visible in this listing; presumably plain left/right and
// left%right in the respective signedness — TODO confirm against upstream.
53 static int32_t JIT_OPERATION operationDiv(int32_t left, int32_t right)
58 static int32_t JIT_OPERATION operationMod(int32_t left, int32_t right)
63 static uint32_t JIT_OPERATION operationUnsignedDiv(uint32_t left, uint32_t right)
68 static uint32_t JIT_OPERATION operationUnsignedMod(uint32_t left, uint32_t right)
// Baseline JIT compiler for a single WebAssembly function. Privately inherits
// the macro-assembler surface from CCallHelpers; the WASMFunctionParser drives
// it through the build*/start*/end* callbacks below.
74 class WASMFunctionCompiler : private CCallHelpers {
// Expression/Statement/ExpressionList are opaque to this backend: values live
// on an in-memory temporary stack (m_tempStackTop), so the parser-facing
// "expression" handles are just dummy ints.
76 typedef int Expression;
77 typedef int Statement;
78 typedef int ExpressionList;
83 enum class JumpCondition { Zero, NonZero };
// stackHeight is the number of StackSlots needed for locals + temporaries,
// computed by a prior analysis pass (not visible here).
85 WASMFunctionCompiler(VM& vm, CodeBlock* codeBlock, JSWASMModule* module, unsigned stackHeight)
86 : CCallHelpers(&vm, codeBlock)
88 , m_stackHeight(stackHeight)
// Emits the function prologue: stack-overflow check, conversion of the
// incoming boxed JS arguments into raw local slots, and zero-initialization
// of the declared i32/f32/f64 locals.
// NOTE(review): listing is missing lines (case labels, braces, #if JSVALUE64/
// JSVALUE32_64 directives elided) — the 64-bit and 32-bit variants of each
// load appear here as adjacent lines.
92 void startFunction(const Vector<WASMType>& arguments, uint32_t numberOfI32LocalVariables, uint32_t numberOfF32LocalVariables, uint32_t numberOfF64LocalVariables)
94 emitFunctionPrologue();
95 emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);
97 m_beginLabel = label();
// Compute the new stack pointer (frame slots rounded up to stack alignment,
// plus headroom for slow-path calls) into regT1, then check it against the
// VM stack limit before committing it to SP.
99 addPtr(TrustedImm32(-WTF::roundUpToMultipleOf(stackAlignmentRegisters(), m_stackHeight) * sizeof(StackSlot) - maxFrameExtentForSlowPathCall), GPRInfo::callFrameRegister, GPRInfo::regT1);
100 m_stackOverflow = branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), GPRInfo::regT1);
102 move(GPRInfo::regT1, stackPointerRegister);
103 checkStackPointerAlignment();
105 m_numberOfLocals = arguments.size() + numberOfI32LocalVariables + numberOfF32LocalVariables + numberOfF64LocalVariables;
// Unbox each JS argument into its typed local slot, coercing as needed.
107 unsigned localIndex = 0;
108 for (size_t i = 0; i < arguments.size(); ++i) {
109 Address address(GPRInfo::callFrameRegister, CallFrame::argumentOffset(i) * sizeof(Register));
110 switch (arguments[i]) {
// I32 argument: single-register (64-bit) vs tag/payload (32-bit) conversion.
113 loadValueAndConvertToInt32(address, GPRInfo::regT0);
115 loadValueAndConvertToInt32(address, GPRInfo::regT0, GPRInfo::regT1);
117 store32(GPRInfo::regT0, localAddress(localIndex++));
// F32/F64 argument: convert to double, then narrow to float for F32 locals.
122 loadValueAndConvertToDouble(address, FPRInfo::fpRegT0, GPRInfo::regT0, GPRInfo::regT1);
124 loadValueAndConvertToDouble(address, FPRInfo::fpRegT0, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT2, FPRInfo::fpRegT1);
126 if (arguments[i] == WASMType::F32)
127 convertDoubleToFloat(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
128 storeDouble(FPRInfo::fpRegT0, localAddress(localIndex++));
131 ASSERT_NOT_REACHED();
// Zero-initialize declared locals. F64 uses one 64-bit store where available,
// otherwise two 32-bit stores (payload then high word).
134 for (uint32_t i = 0; i < numberOfI32LocalVariables; ++i)
135 store32(TrustedImm32(0), localAddress(localIndex++));
136 for (uint32_t i = 0; i < numberOfF32LocalVariables; ++i)
137 store32(TrustedImm32(0), localAddress(localIndex++));
138 for (uint32_t i = 0; i < numberOfF64LocalVariables; ++i) {
140 store64(TrustedImm64(0), localAddress(localIndex++));
142 store32(TrustedImm32(0), localAddress(localIndex));
143 store32(TrustedImm32(0), localAddress(localIndex).withOffset(4));
// +1 accounts for the implicit |this| parameter.
148 m_codeBlock->setNumParameters(1 + arguments.size());
// End of function: emits the default (undefined) return, the out-of-line slow
// paths (stack overflow, divide error, exception unwinding), then links the
// whole buffer and installs the finished JITCode on the CodeBlock.
// NOTE(review): the enclosing function signature (presumably endFunction())
// and several #if USE(JSVALUE64/32_64) directives are missing from this
// listing — the two JSValueRegs declarations below are the 64-bit and 32-bit
// alternatives of one conditional, not consecutive statements.
153 ASSERT(!m_tempStackTop);
155 // FIXME: Remove these if the last statement is a return statement.
157 JSValueRegs returnValueRegs(GPRInfo::returnValueGPR);
159 JSValueRegs returnValueRegs(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
161 moveTrustedValue(jsUndefined(), returnValueRegs);
162 emitFunctionEpilogue();
// Stack overflow slow path: rebuild SP headroom and throw.
165 m_stackOverflow.link(this);
166 if (maxFrameExtentForSlowPathCall)
167 addPtr(TrustedImm32(-maxFrameExtentForSlowPathCall), stackPointerRegister);
168 setupArgumentsWithExecState(TrustedImmPtr(m_codeBlock));
169 appendCallWithExceptionCheck(operationThrowStackOverflowError);
// Arity-check entry point: currently just a second prologue (see FIXME).
171 // FIXME: Implement arity check.
172 Label arityCheck = label();
173 emitFunctionPrologue();
174 emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);
// Shared slow path for division by zero / INT_MIN % -1 traps.
177 if (!m_divideErrorJumpList.empty()) {
178 m_divideErrorJumpList.link(this);
180 setupArgumentsExecState();
181 appendCallWithExceptionCheck(operationThrowDivideError);
// Exception unwinding: hand VM + CallFrame to the handler lookup, then jump
// to the located handler.
184 if (!m_exceptionChecks.empty()) {
185 m_exceptionChecks.link(this);
187 // lookupExceptionHandler is passed two arguments, the VM and the exec (the CallFrame*).
188 move(TrustedImmPtr(vm()), GPRInfo::argumentGPR0);
189 move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
192 // FIXME: should use the call abstraction, but this is currently in the SpeculativeJIT layer!
193 poke(GPRInfo::argumentGPR0);
194 poke(GPRInfo::argumentGPR1, 1);
196 m_calls.append(std::make_pair(call(), FunctionPtr(lookupExceptionHandlerFromCallerFrame).value()));
197 jumpToExceptionHandler();
// Link: resolve every recorded call target and patch call-link info sites.
200 LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock, JITCompilationMustSucceed);
202 for (const auto& iterator : m_calls)
203 patchBuffer.link(iterator.first, FunctionPtr(iterator.second));
205 for (size_t i = 0; i < m_callCompilationInfo.size(); ++i) {
206 CallCompilationInfo& compilationInfo = m_callCompilationInfo[i];
207 CallLinkInfo& info = *compilationInfo.callLinkInfo;
208 info.setCallLocations(patchBuffer.locationOfNearCall(compilationInfo.callReturnLocation),
209 patchBuffer.locationOf(compilationInfo.hotPathBegin),
210 patchBuffer.locationOfNearCall(compilationInfo.hotPathOther));
213 MacroAssemblerCodePtr withArityCheck = patchBuffer.locationOf(arityCheck);
214 CodeRef result = FINALIZE_CODE(patchBuffer, ("Baseline JIT code for WebAssembly"));
215 m_codeBlock->setJITCode(adoptRef(new DirectJITCode(result, withArityCheck, JITCode::BaselineJIT)));
216 m_codeBlock->capabilityLevel();
// Stores the top-of-stack temporary into local slot |localIndex|.
// NOTE(review): the switch(type), case labels, and the pop of m_tempStackTop
// are missing from this listing; presumably the temporary is popped after the
// store — TODO confirm against upstream.
219 void buildSetLocal(uint32_t localIndex, int, WASMType type)
// I32 path: 32-bit copy via regT0.
224 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT0)
226 store32(GPRInfo::regT0, localAddress(localIndex));
// F32/F64 path: 64-bit copy via fpRegT0 (F32 is stored widened as double).
229 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT0);
231 storeDouble(FPRInfo::fpRegT0, localAddress(localIndex));
234 ASSERT_NOT_REACHED();
// Stores the top-of-stack temporary into the module's global variable slot.
// The global's address is baked in as an immediate pointer, so the generated
// code is tied to this JSWASMModule instance.
// NOTE(review): switch/case structure and the temp-stack pop are elided in
// this listing.
238 void buildSetGlobal(uint32_t globalIndex, int, WASMType type)
240 move(TrustedImmPtr(&m_module->globalVariables()[globalIndex]), GPRInfo::regT0);
244 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT1);
245 store32(GPRInfo::regT1, GPRInfo::regT0);
248 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT0);
249 storeDouble(FPRInfo::fpRegT0, GPRInfo::regT0);
252 ASSERT_NOT_REACHED();
// Boxes the top-of-stack temporary as a JSValue in the return registers and
// emits the epilogue/ret. Void returns yield jsUndefined().
// NOTE(review): #if USE(JSVALUE64/32_64) directives, break statements, and
// the temp-stack pop are elided in this listing; adjacent lines below are the
// 64-bit vs 32-bit alternatives of one conditional.
257 void buildReturn(int, WASMExpressionType returnType)
260 JSValueRegs returnValueRegs(GPRInfo::returnValueGPR);
262 JSValueRegs returnValueRegs(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
264 switch (returnType) {
265 case WASMExpressionType::I32:
266 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::returnValueGPR);
// 64-bit: tag the int32 with TagTypeNumber; 32-bit: set the Int32Tag.
268 or64(GPRInfo::tagTypeNumberRegister, GPRInfo::returnValueGPR);
270 move(TrustedImm32(JSValue::Int32Tag), GPRInfo::returnValueGPR2);
274 case WASMExpressionType::F32:
275 case WASMExpressionType::F64:
276 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT0);
// F32 temporaries are kept as doubles on the temp stack; widen before boxing.
277 if (returnType == WASMExpressionType::F32)
278 convertFloatToDouble(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
280 boxDouble(FPRInfo::fpRegT0, GPRInfo::returnValueGPR);
282 boxDouble(FPRInfo::fpRegT0, GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
286 case WASMExpressionType::Void:
287 moveTrustedValue(jsUndefined(), returnValueRegs);
290 ASSERT_NOT_REACHED();
292 emitFunctionEpilogue();
// Immediate constructors: push a constant onto the temporary stack.
// F32 immediates are stored as their raw 32-bit pattern; F64 uses a single
// 64-bit store where available, otherwise two 32-bit stores through a union
// (the union declaration and #if directives are elided in this listing).
296 int buildImmediateI32(uint32_t immediate)
298 store32(Imm32(immediate), temporaryAddress(m_tempStackTop++));
302 int buildImmediateF32(float immediate)
304 store32(Imm32(bitwise_cast<int32_t>(immediate)), temporaryAddress(m_tempStackTop++));
308 int buildImmediateF64(double immediate)
311 store64(Imm64(bitwise_cast<int64_t>(immediate)), temporaryAddress(m_tempStackTop++));
// 32-bit fallback: split the double's bit pattern into two word stores.
315 int32_t int32Values[2];
318 store32(Imm32(u.int32Values[0]), temporaryAddress(m_tempStackTop - 1));
319 store32(Imm32(u.int32Values[1]), temporaryAddress(m_tempStackTop - 1).withOffset(4));
// Pushes a copy of local slot |localIndex| onto the temporary stack.
// I32 copies 32 bits via regT0; F32/F64 copy 64 bits via fpRegT0 (F32 locals
// hold widened doubles). Case labels are elided in this listing.
324 int buildGetLocal(uint32_t localIndex, WASMType type)
329 load32(localAddress(localIndex), GPRInfo::regT0);
330 store32(GPRInfo::regT0, temporaryAddress(m_tempStackTop++));
333 loadDouble(localAddress(localIndex), FPRInfo::fpRegT0);
334 storeDouble(FPRInfo::fpRegT0, temporaryAddress(m_tempStackTop++));
337 ASSERT_NOT_REACHED();
// Pushes a copy of the module global |globalIndex| onto the temporary stack.
// The global's address is an immediate pointer (module-specific code); the
// address register is then used as the load base. Case labels elided.
342 int buildGetGlobal(uint32_t globalIndex, WASMType type)
344 move(TrustedImmPtr(&m_module->globalVariables()[globalIndex]), GPRInfo::regT0);
348 load32(GPRInfo::regT0, GPRInfo::regT0);
349 store32(GPRInfo::regT0, temporaryAddress(m_tempStackTop++));
352 loadDouble(GPRInfo::regT0, FPRInfo::fpRegT0);
353 storeDouble(FPRInfo::fpRegT0, temporaryAddress(m_tempStackTop++));
356 ASSERT_NOT_REACHED();
// Applies a unary int32 op to the top-of-stack temporary in place.
// NOTE(review): switch(op), break statements, and end.link()/zero.link()
// lines are elided in this listing.
361 int buildUnaryI32(int, WASMOpExpressionI32 op)
363 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT0);
365 case WASMOpExpressionI32::Negate:
366 neg32(GPRInfo::regT0);
368 case WASMOpExpressionI32::BitNot:
// xor with -1 == bitwise complement.
369 xor32(TrustedImm32(-1), GPRInfo::regT0);
371 case WASMOpExpressionI32::CountLeadingZeros:
372 countLeadingZeros32(GPRInfo::regT0, GPRInfo::regT0);
374 case WASMOpExpressionI32::LogicalNot: {
375 // FIXME: Don't use branches.
// !x: branch on zero, materialize 0 or 1.
376 Jump zero = branchTest32(Zero, GPRInfo::regT0);
377 move(TrustedImm32(0), GPRInfo::regT0);
380 move(TrustedImm32(1), GPRInfo::regT0);
384 case WASMOpExpressionI32::Abs: {
385 // FIXME: Don't use branches.
// abs(x): skip the negate when already >= 0. (abs(INT_MIN) stays INT_MIN.)
386 Jump end = branchTest32(PositiveOrZero, GPRInfo::regT0);
387 neg32(GPRInfo::regT0);
392 ASSERT_NOT_REACHED();
394 store32(GPRInfo::regT0, temporaryAddress(m_tempStackTop - 1));
// Applies a unary float op to the top-of-stack temporary in place.
// F32 temporaries live on the stack as doubles: each op widens to double,
// computes, then narrows back to float precision via convertDoubleToFloat.
// Ceil/Floor go through C calls to ceilf/floorf. Breaks elided in listing.
398 int buildUnaryF32(int, WASMOpExpressionF32 op)
400 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT1);
402 case WASMOpExpressionF32::Negate:
403 convertFloatToDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT1);
404 negateDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
405 convertDoubleToFloat(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
407 case WASMOpExpressionF32::Abs:
408 convertFloatToDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT1);
409 absDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
410 convertDoubleToFloat(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
412 case WASMOpExpressionF32::Ceil:
413 callOperation(ceilf, FPRInfo::fpRegT1, FPRInfo::fpRegT0);
415 case WASMOpExpressionF32::Floor:
416 callOperation(floorf, FPRInfo::fpRegT1, FPRInfo::fpRegT0);
418 case WASMOpExpressionF32::Sqrt:
419 convertFloatToDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT1);
420 sqrtDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
421 convertDoubleToFloat(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
424 ASSERT_NOT_REACHED();
426 storeDouble(FPRInfo::fpRegT0, temporaryAddress(m_tempStackTop - 1));
// Pops two int32 temporaries (lhs at top-2, rhs at top-1), applies |op|, and
// stores the result back at the new top of stack.
// NOTE(review): #else/#endif directives, break statements, and the temp-stack
// pop are elided in this listing.
430 int buildBinaryI32(int, int, WASMOpExpressionI32 op)
432 load32(temporaryAddress(m_tempStackTop - 2), GPRInfo::regT0);
433 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT1);
435 case WASMOpExpressionI32::Add:
436 add32(GPRInfo::regT1, GPRInfo::regT0);
438 case WASMOpExpressionI32::Sub:
439 sub32(GPRInfo::regT1, GPRInfo::regT0);
441 case WASMOpExpressionI32::Mul:
442 mul32(GPRInfo::regT1, GPRInfo::regT0);
444 case WASMOpExpressionI32::SDiv:
445 case WASMOpExpressionI32::UDiv:
446 case WASMOpExpressionI32::SMod:
447 case WASMOpExpressionI32::UMod: {
// Division traps: divide-by-zero for all four ops, plus INT_MIN / -1
// overflow for the signed ops (idiv would fault on x86).
448 m_divideErrorJumpList.append(branchTest32(Zero, GPRInfo::regT1));
449 if (op == WASMOpExpressionI32::SDiv || op == WASMOpExpressionI32::SMod) {
450 Jump denominatorNotNeg1 = branch32(NotEqual, GPRInfo::regT1, TrustedImm32(-1));
451 m_divideErrorJumpList.append(branch32(Equal, GPRInfo::regT0, TrustedImm32(-2147483647-1)));
452 denominatorNotNeg1.link(this);
// x86 inline path: dividend in eax, divisor moved to ecx; idiv/div leave the
// quotient in eax and the remainder in edx (sign-extend via cdq, elided, for
// the signed case; edx zeroed for unsigned).
454 #if CPU(X86) || CPU(X86_64)
455 ASSERT(GPRInfo::regT0 == X86Registers::eax);
456 move(GPRInfo::regT1, X86Registers::ecx);
457 if (op == WASMOpExpressionI32::SDiv || op == WASMOpExpressionI32::SMod) {
459 m_assembler.idivl_r(X86Registers::ecx);
461 ASSERT(op == WASMOpExpressionI32::UDiv || op == WASMOpExpressionI32::UMod);
462 xor32(X86Registers::edx, X86Registers::edx);
463 m_assembler.divl_r(X86Registers::ecx);
465 if (op == WASMOpExpressionI32::SMod || op == WASMOpExpressionI32::UMod)
466 move(X86Registers::edx, GPRInfo::regT0);
// Non-x86 path: call the C helpers declared at the top of this file.
468 // FIXME: We should be able to do an inline div on ARMv7 and ARM64.
470 case WASMOpExpressionI32::SDiv:
471 callOperation(operationDiv, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT0);
473 case WASMOpExpressionI32::UDiv:
474 callOperation(operationUnsignedDiv, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT0);
476 case WASMOpExpressionI32::SMod:
477 callOperation(operationMod, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT0);
479 case WASMOpExpressionI32::UMod:
480 callOperation(operationUnsignedMod, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT0);
483 ASSERT_NOT_REACHED();
488 case WASMOpExpressionI32::BitOr:
489 or32(GPRInfo::regT1, GPRInfo::regT0);
491 case WASMOpExpressionI32::BitAnd:
492 and32(GPRInfo::regT1, GPRInfo::regT0);
494 case WASMOpExpressionI32::BitXor:
495 xor32(GPRInfo::regT1, GPRInfo::regT0);
497 case WASMOpExpressionI32::LeftShift:
498 lshift32(GPRInfo::regT1, GPRInfo::regT0);
500 case WASMOpExpressionI32::ArithmeticRightShift:
501 rshift32(GPRInfo::regT1, GPRInfo::regT0);
503 case WASMOpExpressionI32::LogicalRightShift:
504 urshift32(GPRInfo::regT1, GPRInfo::regT0);
507 ASSERT_NOT_REACHED();
510 store32(GPRInfo::regT0, temporaryAddress(m_tempStackTop - 1));
// Pops two f32 temporaries, applies |op| in double precision, narrows back
// to float, and stores the result at the new top of stack.
// NOTE(review): break statements and the temp-stack pop are elided here.
// NOTE(review): each input is converted float->double even though buildUnaryF32
// stores temporaries as doubles — presumably matching upstream; verify.
514 int buildBinaryF32(int, int, WASMOpExpressionF32 op)
516 loadDouble(temporaryAddress(m_tempStackTop - 2), FPRInfo::fpRegT0);
517 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT1);
518 convertFloatToDouble(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
519 convertFloatToDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT1);
521 case WASMOpExpressionF32::Add:
522 addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
524 case WASMOpExpressionF32::Sub:
525 subDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
527 case WASMOpExpressionF32::Mul:
528 mulDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
530 case WASMOpExpressionF32::Div:
531 divDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
534 RELEASE_ASSERT_NOT_REACHED();
536 convertDoubleToFloat(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
538 storeDouble(FPRInfo::fpRegT0, temporaryAddress(m_tempStackTop - 1));
// Pops two int32 temporaries, compares them with the condition selected by
// |op| (signed vs unsigned variants differ only in the condition code), and
// pushes 0/1. Breaks, a Below case body, and the temp-stack pop are elided.
542 int buildRelationalI32(int, int, WASMOpExpressionI32 op)
544 load32(temporaryAddress(m_tempStackTop - 2), GPRInfo::regT0);
545 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT1);
546 RelationalCondition condition;
548 case WASMOpExpressionI32::EqualI32:
551 case WASMOpExpressionI32::NotEqualI32:
552 condition = NotEqual;
554 case WASMOpExpressionI32::SLessThanI32:
555 condition = LessThan;
557 case WASMOpExpressionI32::ULessThanI32:
560 case WASMOpExpressionI32::SLessThanOrEqualI32:
561 condition = LessThanOrEqual;
563 case WASMOpExpressionI32::ULessThanOrEqualI32:
564 condition = BelowOrEqual;
566 case WASMOpExpressionI32::SGreaterThanI32:
567 condition = GreaterThan;
569 case WASMOpExpressionI32::UGreaterThanI32:
572 case WASMOpExpressionI32::SGreaterThanOrEqualI32:
573 condition = GreaterThanOrEqual;
575 case WASMOpExpressionI32::UGreaterThanOrEqualI32:
576 condition = AboveOrEqual;
579 RELEASE_ASSERT_NOT_REACHED();
// compare32 materializes the boolean directly — no branches needed here.
581 compare32(condition, GPRInfo::regT0, GPRInfo::regT1, GPRInfo::regT0);
583 store32(GPRInfo::regT0, temporaryAddress(m_tempStackTop - 1));
// Pops two f32 temporaries, widens to double, compares, and pushes 0/1.
// Unlike the i32 variant this uses a branch to materialize the boolean
// (doubles can't use compare32). Breaks and the end.link() are elided.
587 int buildRelationalF32(int, int, WASMOpExpressionI32 op)
589 loadDouble(temporaryAddress(m_tempStackTop - 2), FPRInfo::fpRegT0);
590 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT1);
591 convertFloatToDouble(FPRInfo::fpRegT0, FPRInfo::fpRegT0);
592 convertFloatToDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT1);
593 DoubleCondition condition;
595 case WASMOpExpressionI32::EqualF32:
596 condition = DoubleEqual;
598 case WASMOpExpressionI32::NotEqualF32:
599 condition = DoubleNotEqual;
601 case WASMOpExpressionI32::LessThanF32:
602 condition = DoubleLessThan;
604 case WASMOpExpressionI32::LessThanOrEqualF32:
605 condition = DoubleLessThanOrEqual;
607 case WASMOpExpressionI32::GreaterThanF32:
608 condition = DoubleGreaterThan;
610 case WASMOpExpressionI32::GreaterThanOrEqualF32:
611 condition = DoubleGreaterThanOrEqual;
614 RELEASE_ASSERT_NOT_REACHED();
617 Jump trueCase = branchDouble(condition, FPRInfo::fpRegT0, FPRInfo::fpRegT1);
618 store32(TrustedImm32(0), temporaryAddress(m_tempStackTop - 1));
621 store32(TrustedImm32(1), temporaryAddress(m_tempStackTop - 1));
// Pops two f64 temporaries, compares, and pushes 0/1. Identical to the F32
// variant minus the float->double widening. Breaks and end.link() elided.
626 int buildRelationalF64(int, int, WASMOpExpressionI32 op)
628 loadDouble(temporaryAddress(m_tempStackTop - 2), FPRInfo::fpRegT0);
629 loadDouble(temporaryAddress(m_tempStackTop - 1), FPRInfo::fpRegT1);
630 DoubleCondition condition;
632 case WASMOpExpressionI32::EqualF64:
633 condition = DoubleEqual;
635 case WASMOpExpressionI32::NotEqualF64:
636 condition = DoubleNotEqual;
638 case WASMOpExpressionI32::LessThanF64:
639 condition = DoubleLessThan;
641 case WASMOpExpressionI32::LessThanOrEqualF64:
642 condition = DoubleLessThanOrEqual;
644 case WASMOpExpressionI32::GreaterThanF64:
645 condition = DoubleGreaterThan;
647 case WASMOpExpressionI32::GreaterThanOrEqualF64:
648 condition = DoubleGreaterThanOrEqual;
651 RELEASE_ASSERT_NOT_REACHED();
654 Jump trueCase = branchDouble(condition, FPRInfo::fpRegT0, FPRInfo::fpRegT1);
655 store32(TrustedImm32(0), temporaryAddress(m_tempStackTop - 1));
658 store32(TrustedImm32(1), temporaryAddress(m_tempStackTop - 1));
// Call builders. All three box the argument temporaries into a JS call frame
// (boxArgumentsAndAdjustStackPointer), put the callee JSFunction* in regT0,
// then emit the call and unbox the result (callAndUnboxResult).
// Internal call: callee resolved at compile time from the module's function
// table, baked in as an immediate.
663 int buildCallInternal(uint32_t functionIndex, int, const WASMSignature& signature, WASMExpressionType returnType)
665 boxArgumentsAndAdjustStackPointer(signature.arguments);
667 JSFunction* function = m_module->functions()[functionIndex].get();
668 move(TrustedImmPtr(function), GPRInfo::regT0);
670 callAndUnboxResult(returnType);
// Indirect call: index popped from the temp stack, masked by the (power-of-
// two) table size, then used to load the callee from the pointer table.
674 int buildCallIndirect(uint32_t functionPointerTableIndex, int, int, const WASMSignature& signature, WASMExpressionType returnType)
676 boxArgumentsAndAdjustStackPointer(signature.arguments);
678 const Vector<JSFunction*>& functions = m_module->functionPointerTables()[functionPointerTableIndex].functions;
679 move(TrustedImmPtr(functions.data()), GPRInfo::regT0);
680 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT1);
682 and32(TrustedImm32(functions.size() - 1), GPRInfo::regT1);
683 loadPtr(BaseIndex(GPRInfo::regT0, GPRInfo::regT1, timesPtr()), GPRInfo::regT0);
685 callAndUnboxResult(returnType);
// Import call: callee resolved at compile time from the imported-function
// table.
689 int buildCallImport(uint32_t functionImportIndex, int, const WASMSignature& signature, WASMExpressionType returnType)
691 boxArgumentsAndAdjustStackPointer(signature.arguments);
693 JSFunction* function = m_module->importedFunctions()[functionImportIndex].get();
694 move(TrustedImmPtr(function), GPRInfo::regT0);
696 callAndUnboxResult(returnType);
// Expression lists carry no backend state, so appending is a no-op.
700 void appendExpressionList(int&, int) { }
// Binds a JumpTarget to the current position: records the label for later
// backward jumps and resolves any forward jumps queued in its jumpList.
702 void linkTarget(JumpTarget& target)
704 target.label = label();
705 target.jumpList.link(this);
// Unconditional jump: backward jumps go straight to the bound label; forward
// jumps are queued on the target's jumpList until linkTarget runs.
// (The jump(target.label) line for the bound case is elided in this listing.)
708 void jumpToTarget(JumpTarget& target)
710 if (target.label.isSet())
713 target.jumpList.append(jump());
// Conditional jump: pops the top-of-stack i32 (pop elided in listing) and
// branches when it is zero/non-zero per |condition|.
716 void jumpToTargetIf(JumpCondition condition, int, JumpTarget& target)
718 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT0);
720 Jump taken = branchTest32((condition == JumpCondition::Zero) ? Zero : NonZero, GPRInfo::regT0);
721 if (target.label.isSet())
722 taken.linkTo(target.label, this);
724 target.jumpList.append(taken);
// Loop/switch/label scope management. The enclosing function signatures
// (presumably startLoop/endLoop, startSwitch/endSwitch, startLabel/endLabel —
// TODO confirm against upstream) are missing from this listing; only their
// push/pop bodies survive.
// Loop scope: fresh break + continue targets.
729 m_breakTargets.append(JumpTarget());
730 m_continueTargets.append(JumpTarget());
735 m_breakTargets.removeLast();
736 m_continueTargets.removeLast();
// Switch scope: break target only.
741 m_breakTargets.append(JumpTarget());
746 m_breakTargets.removeLast();
// Labeled-statement scope: continue binds at the label start, break at the
// label end.
751 m_breakLabelTargets.append(JumpTarget());
752 m_continueLabelTargets.append(JumpTarget());
754 linkTarget(m_continueLabelTargets.last());
759 linkTarget(m_breakLabelTargets.last());
761 m_breakLabelTargets.removeLast();
762 m_continueLabelTargets.removeLast();
// Accessors used by the parser to resolve break/continue (optionally by
// label index into the label-target stacks).
765 JumpTarget& breakTarget()
767 return m_breakTargets.last();
770 JumpTarget& continueTarget()
772 return m_continueTargets.last();
775 JumpTarget& breakLabelTarget(uint32_t labelIndex)
777 return m_breakLabelTargets[labelIndex];
780 JumpTarget& continueLabelTarget(uint32_t labelIndex)
782 return m_continueLabelTargets[labelIndex];
// Emits a switch over the top-of-stack i32 (pop elided in listing) using
// BinarySwitch, which generates a balanced comparison tree over |cases|;
// unmatched values fall through to |defaultTarget|.
785 void buildSwitch(int, const Vector<int64_t>& cases, Vector<JumpTarget>& targets, JumpTarget defaultTarget)
787 load32(temporaryAddress(m_tempStackTop - 1), GPRInfo::regT0);
789 BinarySwitch binarySwitch(GPRInfo::regT0, cases, BinarySwitch::Int32);
790 while (binarySwitch.advance(*this)) {
791 unsigned index = binarySwitch.caseIndex();
792 jump(targets[index].label);
794 binarySwitch.fallThrough().linkTo(defaultTarget.label, this);
// Frame-layout helpers: StackSlots grow downward from the call frame
// register. Locals occupy slots [0, m_numberOfLocals); temporaries sit
// immediately below them.
804 Address localAddress(unsigned localIndex) const
806 ASSERT(localIndex < m_numberOfLocals);
807 return Address(GPRInfo::callFrameRegister, -(localIndex + 1) * sizeof(StackSlot));
810 Address temporaryAddress(unsigned temporaryIndex) const
812 ASSERT(m_numberOfLocals + temporaryIndex < m_stackHeight);
813 return Address(GPRInfo::callFrameRegister, -(m_numberOfLocals + temporaryIndex + 1) * sizeof(StackSlot));
// Records a call site plus its eventual target; the pairs in m_calls are
// resolved at link time in the LinkBuffer pass.
816 void appendCall(const FunctionPtr& function)
818 m_calls.append(std::make_pair(call(), function.value()));
// Same, but also queues an exception check so a pending exception after the
// call routes to the unwinding slow path.
821 void appendCallWithExceptionCheck(const FunctionPtr& function)
823 appendCall(function);
824 m_exceptionChecks.append(emitExceptionCheck());
// Near call with a recorded target (return statement elided in listing).
827 Call emitNakedCall(CodePtr function)
829 Call nakedCall = nearCall();
830 m_calls.append(std::make_pair(nakedCall, function.executableAddress()));
// Call-with-result helpers. The GPR variant moves the integer return into
// |result|; the FPR variant is per-ABI (the #if CPU(X86) directive opening
// the x87 branch is elided from this listing).
834 void appendCallSetResult(const FunctionPtr& function, GPRReg result)
836 appendCall(function);
837 move(GPRInfo::returnValueGPR, result);
// x86 (32-bit): double comes back on the x87 stack; spill it through memory.
841 void appendCallSetResult(const FunctionPtr& function, FPRReg result)
843 appendCall(function);
844 m_assembler.fstpl(0, stackPointerRegister);
845 loadDouble(stackPointerRegister, result);
// ARM soft-float EABI: double comes back in r0/r1; vmov into the VFP reg.
847 #elif CPU(ARM) && !CPU(ARM_HARDFP)
848 void appendCallSetResult(const FunctionPtr& function, FPRReg result)
850 appendCall(function);
851 m_assembler.vmov(result, GPRInfo::returnValueGPR, GPRInfo::returnValueGPR2);
// Hard-float ABIs: double comes back in the return FPR.
853 #else // CPU(X86_64) || (CPU(ARM) && CPU(ARM_HARDFP)) || CPU(ARM64) || CPU(MIPS) || CPU(SH4)
854 void appendCallSetResult(const FunctionPtr& function, FPRReg result)
856 appendCall(function);
857 moveDouble(FPRInfo::returnValueFPR, result);
// callOperation overloads: marshal arguments per the JIT operation signature
// naming scheme (Z = int32 return, D = double return, E = ExecState,
// J = EncodedJSValue) and fetch the result.
// NOTE(review): the #if USE(JSVALUE64)/#else split separating the one-register
// and tag/payload variants is elided from this listing.
862 void callOperation(Z_JITOperation_EJ operation, GPRReg src, GPRReg dst)
864 setupArgumentsWithExecState(src);
865 appendCallSetResult(operation, dst);
868 void callOperation(D_JITOperation_EJ operation, GPRReg src, FPRReg dst)
870 setupArgumentsWithExecState(src);
871 appendCallSetResult(operation, dst);
874 // EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned even-numbered register (r0, r2 or [sp]).
875 // To avoid assemblies from using wrong registers, let's occupy r1 or r3 with a dummy argument when necessary.
876 #if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
877 #define EABI_32BIT_DUMMY_ARG TrustedImm32(0),
879 #define EABI_32BIT_DUMMY_ARG
// 32-bit variants: the JSValue is passed as a payload/tag register pair.
882 void callOperation(Z_JITOperation_EJ operation, GPRReg srcTag, GPRReg srcPayload, GPRReg dst)
884 setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG srcPayload, srcTag);
885 appendCallSetResult(operation, dst);
888 void callOperation(D_JITOperation_EJ operation, GPRReg srcTag, GPRReg srcPayload, FPRReg dst)
890 setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG srcPayload, srcTag);
891 appendCallSetResult(operation, dst);
// Raw-C-function variants used for ceilf/floorf and the div/mod helpers;
// no ExecState argument. (The float overload's setupArguments line is elided
// in this listing.)
895 void callOperation(float JIT_OPERATION (*operation)(float), FPRegisterID src, FPRegisterID dst)
898 appendCallSetResult(operation, dst);
901 void callOperation(int32_t JIT_OPERATION (*operation)(int32_t, int32_t), GPRReg src1, GPRReg src2, GPRReg dst)
903 setupArguments(src1, src2);
904 appendCallSetResult(operation, dst);
907 void callOperation(uint32_t JIT_OPERATION (*operation)(uint32_t, uint32_t), GPRReg src1, GPRReg src2, GPRReg dst)
909 setupArguments(src1, src2);
910 appendCallSetResult(operation, dst);
// Builds the outgoing JS call frame for a call: boxes each argument
// temporary into the new frame's argument slots, pops them off the temp
// stack, points SP at the new frame, and writes the argument count.
// NOTE(review): case labels, breaks, and #if JSVALUE64/32_64 directives are
// elided — the store64 vs store32-pair lines are the two boxing variants.
913 void boxArgumentsAndAdjustStackPointer(const Vector<WASMType>& arguments)
915 size_t argumentCount = arguments.size();
// Offset (in Registers, negative = downward) of the new frame: current
// locals + temps, the outgoing args, |this|, and the frame header, rounded
// to stack alignment.
916 int stackOffset = -WTF::roundUpToMultipleOf(stackAlignmentRegisters(), m_numberOfLocals + m_tempStackTop + argumentCount + 1 + JSStack::CallFrameHeaderSize);
// |this| is always undefined for WASM-internal calls.
918 storeTrustedValue(jsUndefined(), Address(GPRInfo::callFrameRegister, (stackOffset + CallFrame::thisArgumentOffset()) * sizeof(Register)));
920 for (size_t i = 0; i < argumentCount; ++i) {
921 Address address(GPRInfo::callFrameRegister, (stackOffset + CallFrame::argumentOffset(i)) * sizeof(Register));
922 switch (arguments[i]) {
// I32: box as a JS int32 (TagTypeNumber on 64-bit, Int32Tag on 32-bit).
924 load32(temporaryAddress(m_tempStackTop - argumentCount + i), GPRInfo::regT0);
926 or64(GPRInfo::tagTypeNumberRegister, GPRInfo::regT0);
927 store64(GPRInfo::regT0, address);
929 store32(GPRInfo::regT0, address.withOffset(PayloadOffset));
930 store32(TrustedImm32(JSValue::Int32Tag), address.withOffset(TagOffset));
934 ASSERT_NOT_REACHED();
937 m_tempStackTop -= argumentCount;
// SP now points just past the CallerFrameAndPC of the callee frame.
939 addPtr(TrustedImm32(stackOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), GPRInfo::callFrameRegister, stackPointerRegister);
940 store32(TrustedImm32(argumentCount + 1), Address(stackPointerRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
// Emits the polymorphic call (inline cache hot path + linking slow path),
// restores this function's SP, and pushes the unboxed result.
// NOTE(review): #if JSVALUE64/32_64 directives, the slow-path label/jump
// structure around the nearCall, and result-unboxing for non-I32 returns are
// elided from this listing.
943 void callAndUnboxResult(WASMExpressionType returnType)
945 // regT0 holds callee.
// Write the callee into the new frame (one 64-bit store, or payload+CellTag).
947 store64(GPRInfo::regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));
949 store32(GPRInfo::regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
950 store32(TrustedImm32(JSValue::CellTag), Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) + TagOffset - sizeof(CallerFrameAndPC)));
// Inline-cache hot path: patchable compare of the callee against the linked
// function, then a near call; mismatches fall to the link-call thunk.
953 DataLabelPtr addressOfLinkedFunctionCheck;
954 Jump slowCase = branchPtrWithPatch(NotEqual, GPRInfo::regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
956 CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
957 info->setUpCall(CallLinkInfo::Call, CodeOrigin(), GPRInfo::regT0);
958 m_callCompilationInfo.append(CallCompilationInfo());
959 m_callCompilationInfo.last().hotPathBegin = addressOfLinkedFunctionCheck;
960 m_callCompilationInfo.last().callLinkInfo = info;
961 m_callCompilationInfo.last().hotPathOther = nearCall();
// Slow path: pass the CallLinkInfo in regT2 and call the link-call thunk.
965 move(TrustedImmPtr(info), GPRInfo::regT2);
966 m_callCompilationInfo.last().callReturnLocation = emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).code());
// Restore this frame's SP (callee may have clobbered it) and re-verify
// alignment.
969 addPtr(TrustedImm32(-WTF::roundUpToMultipleOf(stackAlignmentRegisters(), m_stackHeight) * sizeof(StackSlot) - maxFrameExtentForSlowPathCall), GPRInfo::callFrameRegister, stackPointerRegister);
970 checkStackPointerAlignment();
972 switch (returnType) {
973 case WASMExpressionType::I32:
974 store32(GPRInfo::returnValueGPR, temporaryAddress(m_tempStackTop++));
976 case WASMExpressionType::Void:
979 ASSERT_NOT_REACHED();
// Single-register (64-bit JSValue) argument converters used by startFunction.
// Fast path for values already int32/double; otherwise a slow C call that may
// run JS. (The end-jump/link lines around the slow call are elided here.)
984 void loadValueAndConvertToInt32(Address address, GPRReg dst)
986 JSValueRegs tempRegs(dst);
987 loadValue(address, tempRegs);
988 Jump checkJSInt32 = branchIfInt32(tempRegs);
990 callOperation(operationConvertJSValueToInt32, dst, dst);
992 checkJSInt32.link(this);
// Double conversion: int32 converts inline, doubles unbox inline, everything
// else goes through operationConvertJSValueToDouble.
995 void loadValueAndConvertToDouble(Address address, FPRReg dst, GPRReg scratch1, GPRReg scratch2)
997 JSValueRegs tempRegs(scratch1);
998 loadValue(address, tempRegs);
999 Jump checkJSInt32 = branchIfInt32(tempRegs);
1000 Jump checkJSNumber = branchIfNumber(tempRegs, scratch2);
1003 callOperation(operationConvertJSValueToDouble, tempRegs.gpr(), dst);
1006 checkJSInt32.link(this);
1007 convertInt32ToDouble(tempRegs.gpr(), dst);
1010 checkJSNumber.link(this);
1011 unboxDoubleWithoutAssertions(tempRegs.gpr(), dst);
// Tag/payload (32-bit JSValue) variants of the argument converters above.
// Same fast-path structure; conversions pass the tag and payload registers
// separately. (End-jump/link lines around the slow calls are elided here.)
1015 void loadValueAndConvertToInt32(Address address, GPRReg dst, GPRReg scratch)
1017 JSValueRegs tempRegs(scratch, dst);
1018 loadValue(address, tempRegs);
1019 Jump checkJSInt32 = branchIfInt32(tempRegs);
1021 callOperation(operationConvertJSValueToInt32, tempRegs.tagGPR(), tempRegs.payloadGPR(), dst);
1023 checkJSInt32.link(this);
1026 void loadValueAndConvertToDouble(Address address, FPRReg dst, GPRReg scratch1, GPRReg scratch2, GPRReg scratch3, FPRReg fpScratch)
1028 JSValueRegs tempRegs(scratch2, scratch1);
1029 loadValue(address, tempRegs);
1030 Jump checkJSInt32 = branchIfInt32(tempRegs);
1031 Jump checkJSNumber = branchIfNumber(tempRegs, scratch3);
1034 callOperation(operationConvertJSValueToDouble, tempRegs.tagGPR(), tempRegs.payloadGPR(), dst);
1037 checkJSInt32.link(this);
1038 convertInt32ToDouble(tempRegs.payloadGPR(), dst);
1041 checkJSNumber.link(this);
1042 unboxDouble(tempRegs.tagGPR(), tempRegs.payloadGPR(), dst, fpScratch);
// --- Data members (the private: label is elided in this listing) ---
// Module being compiled (non-owning), frame geometry, and the temp-stack
// cursor used by every build* method above.
1047 JSWASMModule* m_module;
1048 unsigned m_stackHeight;
1049 unsigned m_numberOfLocals;
1050 unsigned m_tempStackTop { 0 };
// break/continue target stacks for loops and switches, plus the labeled-
// statement variants indexed by label.
1052 Vector<JumpTarget> m_breakTargets;
1053 Vector<JumpTarget> m_continueTargets;
1054 Vector<JumpTarget> m_breakLabelTargets;
1055 Vector<JumpTarget> m_continueLabelTargets;
// Slow-path plumbing resolved in endFunction: prologue stack check, division
// traps, and post-call exception checks.
1058 Jump m_stackOverflow;
1059 JumpList m_divideErrorJumpList;
1060 JumpList m_exceptionChecks;
// Call sites awaiting link-time resolution, and call-IC patch locations.
1062 Vector<std::pair<Call, void*>> m_calls;
1063 Vector<CallCompilationInfo> m_callCompilationInfo;
1068 #endif // ENABLE(WEBASSEMBLY)
1070 #endif // WASMFunctionCompiler_h