/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "WasmBinding.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "WasmCallingConvention.h"
#include "WasmExceptionType.h"

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;
static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContext(result);
    jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
}

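// wasmToJs generates the stub that sits between a wasm call_import and the imported
// JavaScript function: it rejects signatures that use i64, re-boxes the wasm arguments
// as JSValues following the JS calling convention, performs the call (either through a
// patchable CallLinkInfo or through a C++ helper, depending on
// Options::useCallICsForWebAssemblyToJSCalls()), and converts the JSValue result back to
// the wasm return type.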
MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321

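    // i64 cannot flow into or out of JavaScript here (there is no JSValue representation
    // for it), so an import whose signature mentions i64 just gets a stub that throws a
    // TypeError when it is called.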
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature.returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64:
                hasBadI64Use = true;
                break;

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            jit.loadWasmContext(GPRInfo::argumentGPR1);

            // Set the callee slot so the frame looks sane to the unwinder below.
            jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR2);
            jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

            auto call = jit.call();
            jit.jumpToExceptionHandler(*vm);

            void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* wasmContext) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = wasmContext->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention saves at least all the wasm callee-saved registers. We therefore don't need to save and restore more registers, since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

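    // Two strategies follow. Without call ICs, the wasm arguments are spilled to a scratch
    // buffer and a C++ helper performs the JS call through the CallData machinery. Otherwise,
    // a JS call frame is materialized on the stack and the call goes through a patchable
    // CallLinkInfo, with C++ slow paths only for result conversion.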
    if (!Options::useCallICsForWebAssemblyToJSCalls()) {
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
        char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned bufferOffset = 0;
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        const GPRReg scratchGPR = GPRInfo::regCS0;
        jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
        jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));

        // Copy each argument out of its wasm argument register (or out of the caller's frame,
        // once the registers run out) into the scratch buffer as a raw 64-bit value.
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED();
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                jit.zeroExtend32ToPtr(gprReg, gprReg);
                jit.store64(gprReg, buffer + bufferOffset);
                ++marshalledGPRs;
                break;
            }
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            }
            bufferOffset += sizeof(Register);
        }
        jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
        if (argCount) {
            // The GC should not look at this buffer at all, these aren't JSValues.
            jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::argumentGPR0);
            jit.storePtr(CCallHelpers::TrustedImmPtr(0), GPRInfo::argumentGPR0);
        }

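        // callFunc is the C++ side of this strategy: it re-boxes the raw bits spilled above
        // as JSValues, invokes the import through the generic CallData path, and returns the
        // result as a raw 64-bit payload for the switch further below to move into the wasm
        // return register.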
        uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
            [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                const Signature& signature = SignatureInformation::get(signatureIndex);
                MarkedArgumentBuffer args;
                for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
                    Type argType = signature.argument(argNum);
                    JSValue arg;
                    switch (argType) {
                    case Void:
                    case Func:
                    case Anyfunc:
                    case I64:
                        RELEASE_ASSERT_NOT_REACHED();
                    case I32:
                        arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
                        break;
                    case F32:
                    case F64:
                        // F32 arguments were already widened to double when they were spilled.
                        arg = jsNumber(bitwise_cast<double>(buffer[argNum]));
                        break;
                    }
                    args.append(arg);
                }

                CallData callData;
                CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
                RELEASE_ASSERT(callType != CallType::None);
                JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
                RETURN_IF_EXCEPTION(throwScope, 0);

                uint64_t realResult = 0;
                switch (signature.returnType()) {
                case Func:
                case Anyfunc:
                case I64:
                    RELEASE_ASSERT_NOT_REACHED();
                case Void:
                    break;
                case I32:
                    realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
                    break;
                case F32:
                case F64:
                    realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
                    break;
                }

                RETURN_IF_EXCEPTION(throwScope, 0);
                return realResult;
            };

        jit.loadWasmContext(GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
        jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR1);
        static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
        jit.setupArgumentsWithExecState(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
        auto call = jit.call();
        auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);

        // Exception path: copy the callee saves into the VM's buffer and unwind from C++.
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };
        auto exceptionCall = jit.call();
        jit.jumpToExceptionHandler(*vm);

        noException.link(&jit);
        switch (signature.returnType()) {
        case F64: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            break;
        }
        case F32: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
            break;
        }
        default:
            // I32 (and Void) already come back in the return GPR.
            break;
        }

        jit.emitFunctionEpilogue();
        jit.ret();

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
        linkBuffer.link(call, callFunc);
        linkBuffer.link(exceptionCall, doUnwinding);

        return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
    }

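    // Call IC strategy: build the JS call frame for the import directly below the wasm frame
    // and call through a CallLinkInfo, so repeat calls to the same import take the patched
    // fast path without re-entering C++.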
    // FIXME perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

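    // calleeFrame addresses the would-be callee frame as if its CallerFrameAndPC header were
    // already in place; the call below and the callee's prologue push that header, so the
    // slots written here line up with CallFrameSlot offsets in the callee's frame.
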
    // FIXME make these loops which switch on Signature if there are many arguments on the stack. It'll otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their registers for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

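        // JSC's 64-bit JSValue encoding stores a double as its bit pattern plus
        // DoubleEncodeOffset (2^48, per the static_assert above), and an int32 under the
        // TagTypeNumber tag; the loop below produces boxed doubles by adding that offset to
        // the raw f32/f64 bits before writing them into the callee frame.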
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    jit.loadWasmContext(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

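    // The call itself is a classic JSC call IC: branchPtrWithPatch compares the callee cell
    // against a patchable expected value, the fast path is a near call straight to the cached
    // target, and the slow path hands the CallLinkInfo (in regT2) to the link thunk.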
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

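    // Convert the JSValue returned by the import back to the wasm return type. Int32-tagged
    // results are handled inline, other numbers are unboxed inline, and anything else goes
    // to a C++ conversion helper whose exceptions are checked afterwards.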
    switch (signature.returnType()) {
    case Void:
        // The JS result is discarded.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these types in their signature return are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64:
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
        break;
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);

        // An int32: convert it to float directly.
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        // A double: adding TagTypeNumber undoes the DoubleEncodeOffset boxing.
        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);

        // An int32: convert it to double directly.
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        // A double: adding TagTypeNumber undoes the DoubleEncodeOffset boxing.
        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler(*vm);

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

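    // Wire up the IC: the slow near-call goes to the generic link-call thunk, and the
    // CallLinkInfo records where to patch the expected callee and the fast-path call target.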
    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
}

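// wasmToWasm generates the stub for a wasm->wasm import call: it switches to the callee
// Instance's context and memory registers, then tail-calls the callee's wasm entrypoint.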
MacroAssemblerCodeRef wasmToWasm(unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit;

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;

    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    materializeImportJSCell(jit, importIndex, scratch);

    // Get the callee's WebAssembly.Instance and set it as WasmContext. The caller will take care of restoring its own Instance.
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storeWasmContext(baseMemory);

    // FIXME the following code assumes that all WebAssembly.Instance have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntrypointLoadLocation()), scratch);
    jit.loadPtr(scratch, scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)