WebAssembly: store state in TLS instead of on VM
[WebKit-https.git] / Source / JavaScriptCore / wasm / WasmBinding.cpp
1 /*
2  * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "WasmBinding.h"
28
29 #if ENABLE(WEBASSEMBLY)
30
31 #include "CCallHelpers.h"
32 #include "FrameTracers.h"
33 #include "JITExceptions.h"
34 #include "JSCInlines.h"
35 #include "JSWebAssemblyInstance.h"
36 #include "LinkBuffer.h"
37 #include "NativeErrorConstructor.h"
38 #include "WasmCallingConvention.h"
39 #include "WasmContext.h"
40 #include "WasmExceptionType.h"
41
42 namespace JSC { namespace Wasm {
43
44 typedef CCallHelpers JIT;
45
46 static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
47 {
48     // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
49     jit.loadWasmContext(result);
50     jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
51 }
52
// Generates the marshalling stub used when WebAssembly calls an imported
// JavaScript function: it materializes a JS call frame on the stack, boxes
// each wasm argument into a JSValue in that frame, performs a patchable JS
// call through a CallLinkInfo, and converts the JSValue result back into the
// wasm return type. Signatures mentioning i64 anywhere are invalid for JS
// imports, so those get a short stub that just throws a TypeError.
static MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature* signature = SignatureInformation::get(vm, signatureIndex);
    unsigned argCount = signature->argumentCount();
    JIT jit(vm, nullptr);

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
    jit.storePtr(JIT::TrustedImmPtr(vm->webAssemblyToJSCallee.get()), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));


    // i64 is not representable as a JSValue, so a signature using it anywhere
    // (return or argument) cannot call into JS. Emit a stub that throws a
    // TypeError instead, and return early without any argument marshalling.
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature->returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64: {
                hasBadI64Use = true;
                break;
            }

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            auto call = jit.call();
            jit.jumpToExceptionHandler();

            // Slow-path C callee: creates and throws the TypeError, then
            // unwinds so the jumpToExceptionHandler above has a catch frame.
            void (*throwBadI64)(ExecState*) = [] (ExecState* exec) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = loadWasmContext(*vm)->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(*vm, jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention saves at least all the wasm callee saved. We therefore don't need to save and restore more registers since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    // FIXME perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

    // Carve out the callee frame (header + this + arguments) below SP, keeping
    // the stack pointer aligned. calleeFrame addresses slots of that new frame.
    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME make these loops which switch on Signature if there are many arguments on the stack. It'll otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their register for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    // Second pass: box the floating-point parameters as JSValue doubles.
    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature->argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                // JS only has doubles: widen, purify NaN, then JSValue-encode.
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    // Fill in the remaining header slots of the callee frame: callee cell,
    // argument count, and |this| (undefined — imports are called sloppily).
    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

    // Standard patchable inline-cached call: fast path when the callee matches
    // the patched cell, otherwise the linking slow path (regT0/regT2 protocol).
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

    // Convert the JSValue returned in returnValueGPR into the wasm return
    // type. Numbers are converted inline; anything else takes a C call to the
    // appropriate toInt32/toNumber conversion (which may throw).
    switch (signature->returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these types in their signature return are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t { 
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        // Undo the JSValue double encoding (add TagTypeNumber == subtract
        // DoubleEncodeOffset in 64-bit arithmetic), then narrow to float.
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float { 
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        // Undo the JSValue double encoding, leaving the raw double bits.
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException());

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double { 
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    // Out-of-line path for exceptions raised by the conversion calls above:
    // restore callee saves, then unwind to the handler frame.
    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler();

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

    // Finalize: link the slow call to the generic link-call thunk and record
    // the patch locations on the CallLinkInfo so later linking can patch us.
    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);
#if !defined(NDEBUG)
    String signatureDescription = SignatureInformation::get(vm, signatureIndex)->toString();
#else
    String signatureDescription;
#endif
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signatureDescription.ascii().data()));
}
421
// Generates the stub for a wasm->wasm import call: switches the WasmContext
// to the callee's Instance, re-pins the callee's memory base/size registers,
// then tail-calls the callee's wasm entry point. No JSValue marshalling is
// needed since both sides use the wasm calling convention.
static MacroAssemblerCodeRef wasmToWasm(VM* vm, unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit(vm, nullptr);

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;

    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    materializeImportJSCell(jit, importIndex, scratch);

    // Get the callee's WebAssembly.Instance and set it as WasmContext. The caller will take care of restoring its own Instance.
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storeWasmContext(baseMemory);

    // FIXME the following code assumes that all WebAssembly.Instance have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
    // Each remaining pinned size register holds the size adjusted by its own
    // offset, derived from the zero-offset register loaded above.
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntryPointCode()), scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}
461
462 WasmExitStubs exitStubGenerator(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
463 {
464     WasmExitStubs stubs;
465     stubs.wasmToJs = wasmToJs(vm, callLinkInfos, signatureIndex, importIndex);
466     stubs.wasmToWasm = wasmToWasm(vm, importIndex);
467     return stubs;
468 }
469
470 } } // namespace JSC::Wasm
471
472 #endif // ENABLE(WEBASSEMBLY)