/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmBinding.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "WasmCallingConvention.h"
#include "WasmExceptionType.h"

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;

static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContext(result);
    jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
}

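// Builds the wasm -> JS import stub for import[importIndex]: marshal the wasm arguments into
// the JS calling convention, call the imported JS function, convert the result back to the
// wasm return type, and unwind through this frame if the call throws.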
MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    // Below, we assume that the JS calling convention passes all arguments on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME: Stop using 0 as the codeBlock. https://bugs.webkit.org/show_bug.cgi?id=165321

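    // i64 cannot cross the JS boundary, so if the signature uses i64 for its return type or any
    // argument, emit a stub that throws a TypeError instead of calling out to JS.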
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature.returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64: {
                hasBadI64Use = true;
                break;
            }

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            jit.loadWasmContext(GPRInfo::argumentGPR1);

            // Store Callee.
            jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR2);
            jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

            auto call = jit.call();
            jit.jumpToExceptionHandler(*vm);

            void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* wasmContext) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = wasmContext->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention saves at least all the wasm callee-saved registers. We therefore don't need to save and restore more registers, since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    if (!Options::useCallICsForWebAssemblyToJSCalls()) {
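        // Without call ICs we spill every wasm argument into a VM scratch buffer and call a
        // C++ helper that boxes the values as JSValues and performs the JS call.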
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
        char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned bufferOffset = 0;
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        const GPRReg scratchGPR = GPRInfo::regCS0;
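        // scratchGPR is used as scratch below but must be preserved for the caller, so spill it
        // to freshly allocated, stack-aligned space; it is reloaded right after the marshalling loop.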
        jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
        jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED();
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                jit.zeroExtend32ToPtr(gprReg, gprReg);
                jit.store64(gprReg, buffer + bufferOffset);
                ++marshalledGPRs;
                break;
            }
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            }

            bufferOffset += sizeof(Register);
        }
        jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
        if (argCount) {
            // The GC should not look at this buffer at all; these aren't JSValues.
            jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::argumentGPR0);
            jit.storePtr(CCallHelpers::TrustedImmPtr(0), GPRInfo::argumentGPR0);
        }

        uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
            [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                const Signature& signature = SignatureInformation::get(signatureIndex);
                MarkedArgumentBuffer args;
                for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
                    Type argType = signature.argument(argNum);
                    JSValue arg;
                    switch (argType) {
                    case Void:
                    case Func:
                    case Anyfunc:
                    case I64:
                        RELEASE_ASSERT_NOT_REACHED();
                    case I32:
                        arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
                        break;
                    case F32:
                    case F64:
                        arg = jsNumber(bitwise_cast<double>(buffer[argNum]));
                        break;
                    }
                    args.append(arg);
                }

                CallData callData;
                CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
                RELEASE_ASSERT(callType != CallType::None);
                JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
                RETURN_IF_EXCEPTION(throwScope, 0);

                uint64_t realResult;
                switch (signature.returnType()) {
                case Func:
                case Anyfunc:
                case I64:
                    RELEASE_ASSERT_NOT_REACHED();
                    break;
                case Void:
                    break;
                case I32: {
                    realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
                    break;
                }
                case F64:
                case F32: {
                    realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
                    break;
                }
                }

                RETURN_IF_EXCEPTION(throwScope, 0);
                return realResult;
            };

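        // Store the instance's callee into this frame's callee slot (so the frame is walkable),
        // then call the helper above with (ExecState*, import cell, signature index, argument buffer).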
        jit.loadWasmContext(GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
        jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR1);
        static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
        jit.setupArgumentsWithExecState(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
        auto call = jit.call();
        auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);

        // The call into JS threw: unwind, then jump to the exception handler.
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };
        auto exceptionCall = jit.call();
        jit.jumpToExceptionHandler(*vm);

        noException.link(&jit);
        switch (signature.returnType()) {
        case F64: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            break;
        }
        case F32: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
            break;
        }
        default:
            break;
        }

        jit.emitFunctionEpilogue();
        jit.ret();

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
        linkBuffer.link(call, callFunc);
        linkBuffer.link(exceptionCall, doUnwinding);

        return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
    }

    // FIXME: Perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
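    // calleeFrame addresses the frame being materialized for the JS callee: slots are computed
    // relative to the stack pointer minus sizeof(CallerFrameAndPC), since the caller-frame and
    // return-PC slots are filled in by the call sequence itself.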

    // FIXME: Make these loops switch on Signature if there are many arguments on the stack; otherwise the generated code will be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their registers for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled; these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
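        // Boxing a double as a JSValue adds DoubleEncodeOffset (1 << 48); materialize that
        // constant lazily, and only once, into a register shared by the loop below.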
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments; these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    jit.loadWasmContext(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for the slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME: Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

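    // Set up a JS call IC: a patchable branch compares the callee cell to a cached value; the
    // fast path near-calls the cached target, while the slow path goes through the generic link
    // thunk (which expects the callee in regT0 and the CallLinkInfo in regT2).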
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

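    // Convert the JSValue returned by the call into the wasm return type. Int32 and double
    // JSValues are unboxed inline; anything else calls out to a C++ conversion helper, which
    // may throw.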
    switch (signature.returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these types in their signature are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler(*vm);

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
}

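// Builds the wasm -> wasm import stub: install the callee's instance as the wasm context,
// reload the pinned memory base and size registers for that instance, then tail-call the
// callee's wasm entrypoint.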
MacroAssemblerCodeRef wasmToWasm(unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit;

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;

    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    materializeImportJSCell(jit, importIndex, scratch);

    // Get the callee's WebAssembly.Instance and set it as WasmContext. The caller will take care of restoring its own Instance.
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storeWasmContext(baseMemory);

    // FIXME: The following code assumes that all WebAssembly.Instances have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
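    // Each additional pinned size register holds (memory size - sizeOffset), derived from the
    // first size register, presumably so bounds checks can compare against a pre-adjusted limit.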
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntrypointLoadLocation()), scratch);
    jit.loadPtr(scratch, scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)