/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmBinding.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "WasmCallingConvention.h"
#include "WasmExceptionType.h"

namespace JSC { namespace Wasm {

typedef CCallHelpers JIT;

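// Load the JSObject for import function `importIndex` of the instance that is currently running
// (tracked in vm.topJSWebAssemblyInstance) into `result`.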
static void materializeImportJSCell(VM* vm, JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance, which is identified on VM. That Instance has a list of all its import functions.
    jit.loadPtr(&vm->topJSWebAssemblyInstance, result);
    jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
}

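// Generate the wasm->JS exit stub for a single import: build a JS call frame, box the wasm
// arguments as JSValues, call the imported JS function through a patchable CallLinkInfo call,
// then coerce the JS return value back to the wasm return type. Signatures that use i64 anywhere
// instead get a stub that throws a TypeError, since i64 has no JavaScript representation.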
static MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature* signature = SignatureInformation::get(vm, signatureIndex);
    unsigned argCount = signature->argumentCount();
    JIT jit(vm, nullptr);

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
    jit.storePtr(JIT::TrustedImmPtr(vm->webAssemblyToJSCallee.get()), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

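    // i64 values cannot be represented as JSValues, so an import whose signature mentions i64
    // (as an argument type or as the return type) gets a stub that just throws a TypeError.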
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature->returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64: {
                hasBadI64Use = true;
                break;
            }

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            auto call = jit.call();
            jit.jumpToExceptionHandler();

            void (*throwBadI64)(ExecState*) = [] (ExecState* exec) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = vm->topJSWebAssemblyInstance->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(*vm, jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention saves at least all the wasm callee saves. We therefore don't need to save and restore more registers since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    // FIXME: Perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

    unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME: Turn these into loops that switch on Signature if there are many arguments on the stack; the generated code will otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

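    // Arguments are marshalled in two passes over the signature: the first pass boxes the I32 arguments
    // and stores them into the callee frame, which frees the argument GPRs to act as scratch registers
    // (e.g. for the double-encode offset) when the F32/F64 arguments are marshalled in the second pass.
    // Both passes walk every argument so the caller-frame and callee-frame offsets stay in sync.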
    // First go through the integer parameters, freeing up their register for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

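    // JSValue NaN-boxes doubles by adding DoubleEncodeOffset (1 << 48) to their bit pattern. The offset
    // is materialized lazily into a now-free argument GPR and reused for every floating-point argument.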
    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(vm, jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

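    // The call to the JS import goes through a patchable call: the fast path compares the callee against
    // a patchable pointer (initially null) and falls through to a near call; the slow path jumps to the
    // link-call thunk with the callee in regT0 and the CallLinkInfo in regT2, which is responsible for
    // linking the call.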
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

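    // The JS call leaves an encoded JSValue in returnValueGPR; coerce it to the wasm return type.
    // The int32 and double encodings are handled inline; anything else calls into C++ (toInt32 / toNumber),
    // which can throw, so those calls get exception checks accumulated in exceptionChecks below.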
    CCallHelpers::JumpList exceptionChecks;

    switch (signature->returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these return types are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

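    // Exceptions raised while coercing the return value land here: spill the callee saves into the
    // VM entry frame buffer, run genericUnwind to locate the handler, then jump to it.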
    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler();

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

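    // Finalize the stub and record the call locations on the CallLinkInfo so the fast-path call can
    // later be linked to (and unlinked from) the concrete JS callee.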
    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);
#if !defined(NDEBUG)
    String signatureDescription = SignatureInformation::get(vm, signatureIndex)->toString();
#else
    String signatureDescription;
#endif
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signatureDescription.ascii().data()));
}

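// Generate the wasm->wasm exit stub for a single import: switch vm.topJSWebAssemblyInstance to the
// callee's instance, re-pin the memory base and size registers for that instance's memory, and
// tail-call the callee's wasm entry point.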
static MacroAssemblerCodeRef wasmToWasm(VM* vm, unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit(vm, nullptr);

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;

    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    materializeImportJSCell(vm, jit, importIndex, scratch);

    // Get the callee's WebAssembly.Instance and set it as vm.topJSWebAssemblyInstance. The caller will take care of restoring its own Instance.
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storePtr(baseMemory, &vm->topJSWebAssemblyInstance);

    // FIXME: The following code assumes that all WebAssembly.Instances have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntryPointCode()), scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}

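// Build both exit stubs for a given import: the wasm->JS marshalling stub and the wasm->wasm tail-call stub.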
WasmExitStubs exitStubGenerator(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    WasmExitStubs stubs;
    stubs.wasmToJs = wasmToJs(vm, callLinkInfos, signatureIndex, importIndex);
    stubs.wasmToWasm = wasmToWasm(vm, importIndex);
    return stubs;
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)