Source/JavaScriptCore/wasm/WasmBinding.cpp
/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmBinding.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "ThunkGenerators.h"
#include "WasmCallingConvention.h"
#include "WasmContext.h"
#include "WasmExceptionType.h"

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;

static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContext(result);
    jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
}

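// wasmToJs generates the stub used when WebAssembly calls an imported JavaScript function:
// it converts the wasm arguments into JSValues, performs the JS call, converts the JSValue
// result back to the wasm return type, and propagates any JS exception.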
MacroAssemblerCodeRef wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    // Note: WasmB3IRGenerator assumes that this stub treats SP as a callee save.
    // If we ever change this, we will also need to change WasmB3IRGenerator.

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321

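    // i64 has no JavaScript representation, so an import whose signature mentions it must
    // throw a TypeError when called. In that case, emit a stub that does nothing but throw.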
    {
        bool hasBadI64Use = false;
        hasBadI64Use |= signature.returnType() == I64;
        for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
                RELEASE_ASSERT_NOT_REACHED();

            case I64: {
                hasBadI64Use = true;
                break;
            }

            default:
                break;
            }
        }

        if (hasBadI64Use) {
            jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            jit.loadWasmContext(GPRInfo::argumentGPR1);

            // Store Callee.
            jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR2);
            jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

            auto call = jit.call();
            jit.jumpToExceptionHandler(*vm);

            void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* wasmContext) -> void {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);

                {
                    auto throwScope = DECLARE_THROW_SCOPE(*vm);
                    JSGlobalObject* globalObject = wasmContext->globalObject();
                    auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                    throwException(exec, throwScope, error);
                }

                genericUnwind(vm, exec);
                ASSERT(!!vm->callFrameForCatch);
            };

            LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
            linkBuffer.link(call, throwBadI64);
            return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
        }
    }

    // Here we assume that the JS calling convention saves at least all the wasm callee-saved registers. We therefore don't need to save and restore more registers, since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

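    // Without call ICs, take the simple route: spill every argument as raw 64-bit bits into
    // a scratch buffer, then make one C call (callFunc below) that rebuilds JSValues from the
    // buffer and performs the call through the generic JSC call() machinery.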
    if (!Options::useCallICsForWebAssemblyToJSCalls()) {
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
        char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned bufferOffset = 0;
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        const GPRReg scratchGPR = GPRInfo::regCS0;
        jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
        jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED();
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                jit.zeroExtend32ToPtr(gprReg, gprReg);
                jit.store64(gprReg, buffer + bufferOffset);
                ++marshalledGPRs;
                break;
            }
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            }

            bufferOffset += sizeof(Register);
        }
        jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
        if (argCount) {
            // The GC should not look at this buffer at all, these aren't JSValues.
            jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::argumentGPR0);
            jit.storePtr(CCallHelpers::TrustedImmPtr(0), GPRInfo::argumentGPR0);
        }

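        // callFunc runs on the C++ side: it reinterprets the raw bits in the scratch buffer
        // according to the signature, builds a MarkedArgumentBuffer, calls the import with an
        // undefined |this|, and coerces the result back to raw bits for the wasm caller.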
        uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
            [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                const Signature& signature = SignatureInformation::get(signatureIndex);
                MarkedArgumentBuffer args;
                for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
                    Type argType = signature.argument(argNum);
                    JSValue arg;
                    switch (argType) {
                    case Void:
                    case Func:
                    case Anyfunc:
                    case I64:
                        RELEASE_ASSERT_NOT_REACHED();
                    case I32:
                        arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
                        break;
                    case F32:
                    case F64:
                        arg = jsNumber(bitwise_cast<double>(buffer[argNum]));
                        break;
                    }
                    args.append(arg);
                }

                CallData callData;
                CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
                RELEASE_ASSERT(callType != CallType::None);
                JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
                RETURN_IF_EXCEPTION(throwScope, 0);

                uint64_t realResult;
                switch (signature.returnType()) {
                case Func:
                case Anyfunc:
                case I64:
                    RELEASE_ASSERT_NOT_REACHED();
                    break;
                case Void:
                    break;
                case I32: {
                    realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
                    break;
                }
                case F64:
                case F32: {
                    realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
                    break;
                }
                }

                RETURN_IF_EXCEPTION(throwScope, 0);
                return realResult;
            };

        jit.loadWasmContext(GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
        jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR1);
        static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
        jit.setupArgumentsWithExecState(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
        auto call = jit.call();
        auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);

        // The call raised an exception: unwind, then jump to the exception handler.
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };
        auto exceptionCall = jit.call();
        jit.jumpToExceptionHandler(*vm);

        noException.link(&jit);
        switch (signature.returnType()) {
        case F64: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            break;
        }
        case F32: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
            break;
        }
        default:
            break;
        }

        jit.emitFunctionEpilogue();
        jit.ret();

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
        linkBuffer.link(call, callFunc);
        linkBuffer.link(exceptionCall, doUnwinding);

        return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
    }

    // FIXME perform a stack check before updating SP. https://bugs.webkit.org/show_bug.cgi?id=165546

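    // Call IC path: build a JS call frame just below the current stack pointer and call the
    // import through a patchable CallLinkInfo, avoiding the C++ round trip used above.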
    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME Turn these into loops which switch on Signature if there are many arguments on the stack. It'll otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their registers for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

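    // Second pass: the floating-point parameters. F32/F64 arguments become JSValue doubles,
    // which are boxed by adding DoubleEncodeOffset (1 << 48) per JSC's 64-bit value encoding.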
    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.purifyNaN(fprReg);
                jit.moveDoubleTo64(fprReg, scratch);
                materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
                jit.add64(doubleEncodeOffsetGPRReg, scratch);
                jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                ++marshalledFPRs;
                break;
            }
            }
        }
    }

    jit.loadWasmContext(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));

    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

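    // Emit a patchable call: the fast path compares the callee against a pointer that the
    // CallLinkInfo patches in; on mismatch we fall through to the slow path, which calls the
    // link-call thunk (linked below) to resolve the callee and repatch the fast path.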
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(0);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

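    // Convert the JSValue left in returnValueGPR back to the wasm return type. Values that are
    // already int32 (and, for the floating-point cases, doubles) are unboxed inline; everything
    // else falls back to a C call (toInt32 / toNumber), which can itself throw.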
    switch (signature.returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these return types in their signature are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;
        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
        auto call = jit.call();
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };
        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call();
        jit.jumpToExceptionHandler(*vm);

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
}

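// wasmToWasm generates the stub used when a wasm caller invokes another module's exported
// wasm function through an import: it switches the wasm context to the callee's Instance,
// reloads the pinned memory base and size registers from that Instance's Memory, and
// tail-calls the callee's wasm entrypoint.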
MacroAssemblerCodeRef wasmToWasm(unsigned importIndex)
{
    const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
    JIT jit;

    GPRReg scratch = GPRInfo::nonPreservedNonArgumentGPR;
    GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
    ASSERT(baseMemory != scratch);
    const auto& sizeRegs = pinnedRegs.sizeRegisters;
    ASSERT(sizeRegs.size() >= 1);
    ASSERT(sizeRegs[0].sizeRegister != baseMemory);
    ASSERT(sizeRegs[0].sizeRegister != scratch);
    GPRReg sizeRegAsScratch = sizeRegs[0].sizeRegister;

    static_assert(std::is_same<Context, JSWebAssemblyInstance>::value, "This is assumed in the code below.");
    // B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
    jit.loadWasmContext(sizeRegAsScratch); // Old Instance*
    jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), scratch);

    // Get the callee's WebAssembly.Instance and set it as WasmContext. The caller will take care of restoring its own Instance.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
    jit.storeWasmContext(baseMemory);

    jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfCachedStackLimit()), sizeRegAsScratch);
    jit.storePtr(sizeRegAsScratch, JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfCachedStackLimit()));

    // FIXME the following code assumes that all WebAssembly.Instance have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
    // Set up the callee's baseMemory register as well as the memory size registers.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
    ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
    jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
    for (unsigned i = 1; i < sizeRegs.size(); ++i) {
        ASSERT(sizeRegs[i].sizeRegister != baseMemory);
        ASSERT(sizeRegs[i].sizeRegister != scratch);
        jit.add64(JIT::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
    }

    // Tail call into the callee WebAssembly function.
    jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntrypointLoadLocation()), scratch);
    jit.loadPtr(scratch, scratch);
    jit.jump(scratch);

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("WebAssembly->WebAssembly import[%i]", importIndex));
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)