WebAssembly: clear out insignificant i32 bits when calling JavaScript
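The wasm->JS import stub now zero-extends each i32 argument before boxing it as a JSValue, so stale upper bits from the caller cannot leak into the boxed int32.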
Source/JavaScriptCore/wasm/WasmBinding.cpp
/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmBinding.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "WasmCallingConvention.h"
#include "WasmExceptionType.h"

namespace JSC { namespace Wasm {

typedef CCallHelpers JIT;

static void materializeImportJSCell(VM* vm, JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance, which the VM tracks as topJSWebAssemblyInstance. That Instance has a list of all its import functions.
    jit.loadPtr(&vm->topJSWebAssemblyInstance, result);
    jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
}

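// Generate the stub used when wasm calls an imported JavaScript function: it boxes the wasm
// arguments as JSValues, builds a JS call frame, calls the import through a CallLinkInfo, and
// converts the JS return value back to the wasm return type.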
static MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature* signature = SignatureInformation::get(vm, signatureIndex);
    unsigned argCount = signature->argumentCount();
    JIT jit(vm, nullptr);

    // Below, we assume that the JS calling convention passes all arguments on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
    jit.storePtr(JIT::TrustedImmPtr(vm->webAssemblyToJSCallee.get()), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

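    // The JavaScript API does not allow i64 in an import's signature, as either the return type or
    // an argument type, so emit a stub that throws a TypeError instead of attempting the call.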
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature->returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64: {
                hasBadI64Use = true;
                break;
            }

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            auto call = jit.call();
            jit.jumpToExceptionHandler();

            void (*throwBadI64)(ExecState*) = [] (ExecState* exec) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = vm->topJSWebAssemblyInstance->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(*vm, jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention preserves at least all of the wasm callee-saved registers. We therefore don't need to save and restore anything extra, since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    // FIXME perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

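    // Size the callee's frame: the CallFrame header plus one Register per JS argument (including
    // the implicit |this|), rounded up to stack alignment. CallerFrameAndPC is pushed by the call
    // itself, so it is not included in the stack pointer adjustment.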
    unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME: if there are many arguments on the stack, emit loops that switch on the Signature instead of fully unrolled code; the stub is otherwise huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their register for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
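        // JSValue NaN-boxes doubles by adding DoubleEncodeOffset (1 << 48); materialize that
        // constant lazily in a spilled argument GPR so it can be reused across arguments.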
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(vm, jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

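    // Emit JSC's standard call IC: a patchable pointer check guards a direct near call, and the
    // slow path loads the CallLinkInfo into regT2 and goes through the generic link-call thunk.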
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

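    // Convert the returned JSValue back to the wasm return type. Int32 and double results are
    // unboxed inline; anything else calls out to C++ for the generic ToInt32 / ToNumber conversion,
    // which can throw.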
    CCallHelpers::JumpList exceptionChecks;

    switch (signature->returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these return types in their signature are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
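        // Unbox the double: a 64-bit add of TagTypeNumber is the same as subtracting
        // DoubleEncodeOffset, recovering the raw double bits.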
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler();

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);
#if !defined(NDEBUG)
    String signatureDescription = SignatureInformation::get(vm, signatureIndex)->toString();
#else
    String signatureDescription;
#endif
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signatureDescription.ascii().data()));
}

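// Generate the stub used when wasm calls an import that is itself a WebAssembly function: switch
// vm.topJSWebAssemblyInstance to the callee's Instance, re-pin the memory base and size registers
// for that Instance's memory, and tail-call the callee's wasm entrypoint.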
static MacroAssemblerCodeRef wasmToWasm(VM* vm, unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit(vm, nullptr);

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;

    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    materializeImportJSCell(vm, jit, importIndex, scratch);

    // Get the callee's WebAssembly.Instance and set it as vm.topJSWebAssemblyInstance. The caller will take care of restoring its own Instance.
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storePtr(baseMemory, &vm->topJSWebAssemblyInstance);

    // FIXME the following code assumes that all WebAssembly.Instances have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntryPointCode()), scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}

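// Each import gets both exit stubs; the one that ends up being called depends on whether the
// import resolves to a JavaScript function or to another WebAssembly function.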
WasmExitStubs exitStubGenerator(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    WasmExitStubs stubs;
    stubs.wasmToJs = wasmToJs(vm, callLinkInfos, signatureIndex, importIndex);
    stubs.wasmToWasm = wasmToWasm(vm, importIndex);
    return stubs;
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)