Source/JavaScriptCore/wasm/js/WasmToJS.cpp
/*
 * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmToJS.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "JSWebAssemblyRuntimeError.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "ThunkGenerators.h"
#include "WasmCallingConvention.h"
#include "WasmContext.h"
#include "WasmExceptionType.h"
#include "WasmInstance.h"

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;

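// Loads the poisoned pointer to import function `importIndex` out of the current Instance and
// unpoisons it (by XORing with the poison key already held in `poison`) into `result`.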
static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg poison, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContextInstance(result);
    jit.loadPtr(JIT::Address(result, Instance::offsetOfImportFunction(importIndex)), result);
    jit.xor64(poison, result);
}

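// The JS API does not allow i64 as an argument or return type of an imported function. When the
// signature has such a use, emit a stub that stores the callee for unwinding, calls out to throw
// a TypeError, and jumps to the exception handler; otherwise return an empty code ref.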
static Expected<MacroAssemblerCodeRef, BindingFailure> handleBadI64Use(VM* vm, JIT& jit, const Signature& signature, unsigned importIndex)
{
    unsigned argCount = signature.argumentCount();

    bool hasBadI64Use = false;
    hasBadI64Use |= signature.returnType() == I64;
    for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
        Type argType = signature.argument(argNum);
        switch (argType) {
        case Void:
        case Func:
        case Anyfunc:
            RELEASE_ASSERT_NOT_REACHED();

        case I64: {
            hasBadI64Use = true;
            break;
        }

        default:
            break;
        }
    }

    if (hasBadI64Use) {
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        jit.loadWasmContextInstance(GPRInfo::argumentGPR1);

        // Store Callee.
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, Instance::offsetOfOwner()), GPRInfo::argumentGPR1);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfPoisonedCallee()), GPRInfo::argumentGPR2);
        jit.move(CCallHelpers::TrustedImm64(JSWebAssemblyInstancePoison::key()), GPRInfo::argumentGPR3);
        jit.xor64(GPRInfo::argumentGPR3, GPRInfo::argumentGPR2);
        jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        // Let's be paranoid on the exception path and zero out the poison instead of leaving it in an argument GPR.
        jit.move(CCallHelpers::TrustedImm32(0), GPRInfo::argumentGPR3);

        auto call = jit.call(NoPtrTag);
        jit.jumpToExceptionHandler(*vm);

        void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* instance) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);

            {
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                JSGlobalObject* globalObject = instance->globalObject();
                auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
                throwException(exec, throwScope, error);
            }

            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
        if (UNLIKELY(linkBuffer.didFailToAllocate()))
            return makeUnexpected(BindingFailure::OutOfMemory);

        linkBuffer.link(call, throwBadI64);
        return FINALIZE_CODE(linkBuffer, NoPtrTag, "WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex);
    }

    return MacroAssemblerCodeRef();
}

Expected<MacroAssemblerCodeRef, BindingFailure> wasmToJS(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    // Note: WasmB3IRGenerator assumes that this stub treats SP as a callee save.
    // If we ever change this, we will also need to change WasmB3IRGenerator.

    // Below, we assume that the JS calling convention passes all arguments on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321

    auto badI64 = handleBadI64Use(vm, jit, signature, importIndex);
    if (!badI64 || badI64.value())
        return badI64;

    // Here we assume that the JS calling convention saves at least all the wasm callee saves. We therefore don't need to save and restore more registers since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

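    // When call ICs are disabled, take a simpler route: marshal the wasm arguments into a VM
    // scratch buffer and call a C++ helper that boxes them as JSValues and performs the call,
    // then move the returned value into the wasm return register(s).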
    if (!Options::useCallICsForWebAssemblyToJSCalls()) {
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
        char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned bufferOffset = 0;
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        const GPRReg scratchGPR = GPRInfo::regCS0;
        jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
        jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED();
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                jit.zeroExtend32ToPtr(gprReg, gprReg);
                jit.store64(gprReg, buffer + bufferOffset);
                ++marshalledGPRs;
                break;
            }
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            }

            bufferOffset += sizeof(Register);
        }
        jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
        if (argCount) {
            // The GC should not look at this buffer at all, these aren't JSValues.
            jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::argumentGPR0);
            jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), GPRInfo::argumentGPR0);
        }

        uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
            [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                const Signature& signature = SignatureInformation::get(signatureIndex);
                MarkedArgumentBuffer args;
                for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
                    Type argType = signature.argument(argNum);
                    JSValue arg;
                    switch (argType) {
                    case Void:
                    case Func:
                    case Anyfunc:
                    case I64:
                        RELEASE_ASSERT_NOT_REACHED();
                    case I32:
                        arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
                        break;
                    case F32:
                    case F64:
                        arg = jsNumber(bitwise_cast<double>(buffer[argNum]));
                        break;
                    }
                    args.append(arg);
                }
                if (UNLIKELY(args.hasOverflowed())) {
                    throwOutOfMemoryError(exec, throwScope);
                    return 0;
                }

                CallData callData;
                CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
                RELEASE_ASSERT(callType != CallType::None);
                JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
                RETURN_IF_EXCEPTION(throwScope, 0);

                uint64_t realResult;
                switch (signature.returnType()) {
                case Func:
                case Anyfunc:
                case I64:
                    RELEASE_ASSERT_NOT_REACHED();
                    break;
                case Void:
                    break;
                case I32: {
                    realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
                    break;
                }
                case F64:
                case F32: {
                    realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
                    break;
                }
                }

                RETURN_IF_EXCEPTION(throwScope, 0);
                return realResult;
            };

        jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfPoisonedCallee()), GPRInfo::argumentGPR0);
        jit.move(CCallHelpers::TrustedImm64(JSWebAssemblyInstancePoison::key()), GPRInfo::argumentGPR3);
        jit.xor64(GPRInfo::argumentGPR3, GPRInfo::argumentGPR0);
        jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR3, GPRInfo::argumentGPR1);

        // Let's be paranoid before the call and zero out the poison instead of leaving it in an argument GPR.
        jit.move(CCallHelpers::TrustedImm32(0), GPRInfo::argumentGPR3);

        static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
        jit.setupArguments<decltype(callFunc)>(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
        auto call = jit.call(NoPtrTag);
        auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);

        // Exception here.
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };
        auto exceptionCall = jit.call(NoPtrTag);
        jit.jumpToExceptionHandler(*vm);

        noException.link(&jit);
        switch (signature.returnType()) {
        case F64: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            break;
        }
        case F32: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
            break;
        }
        default:
            break;
        }

        jit.emitFunctionEpilogue();
        jit.ret();

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
        if (UNLIKELY(linkBuffer.didFailToAllocate()))
            return makeUnexpected(BindingFailure::OutOfMemory);

        linkBuffer.link(call, callFunc);
        linkBuffer.link(exceptionCall, doUnwinding);

        return FINALIZE_CODE(linkBuffer, NoPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
    }

    // Note: We don't need to perform a stack check here since WasmB3IRGenerator
    // will do the stack check for us. Whenever it detects that it might make
    // a call to this thunk, it'll make sure its stack check includes space
    // for us here.

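    // Call IC path: build the JS call frame for the import directly on our stack, box each
    // argument in place (integers first, then doubles), and call through a patchable inline
    // cache so repeated calls to the same callee stay fast.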
    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME make these loops which switch on Signature if there are many arguments on the stack. It'll otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their register for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));

        auto marshallFPR = [&] (FPRReg fprReg) {
            jit.purifyNaN(fprReg);
            jit.moveDoubleTo64(fprReg, scratch);
            materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
            jit.add64(doubleEncodeOffsetGPRReg, scratch);
            jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
            calleeFrameOffset += sizeof(Register);
            ++marshalledFPRs;
        };

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                marshallFPR(fprReg);
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                marshallFPR(fprReg);
                break;
            }
            }
        }
    }

    GPRReg poison = GPRInfo::argumentGPR1;
    ASSERT(poison != GPRInfo::argumentGPR0); // Both are used at the same time below.

    jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfPoisonedCallee()), GPRInfo::argumentGPR0);
    jit.move(CCallHelpers::TrustedImm64(JSWebAssemblyInstancePoison::key()), poison);
    jit.xor64(poison, GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
    ASSERT(poison != importJSCellGPRReg);

    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));
    materializeImportJSCell(jit, importIndex, poison, importJSCellGPRReg);

    // Let's be paranoid and zero out the poison instead of leaving it in an argument GPR.
    jit.move(CCallHelpers::TrustedImm32(0), poison);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

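    // Emit the call IC: a patchable pointer compare on the callee guards a near call to the
    // cached target; on mismatch we fall through to the slow path, which near-calls the
    // link-call thunk with the CallLinkInfo in regT2.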
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(nullptr);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

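    // Convert the JSValue returned by the import back into the wasm return type. Int32 and
    // double results are converted inline; anything else calls out to a C++ helper (which can
    // throw, hence the exception checks collected below).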
    switch (signature.returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these types in their signature's return are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };

        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArguments<decltype(convertToI32)>(GPRInfo::returnValueGPR);
        auto call = jit.call(NoPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToI32);
        });

        done.link(&jit);
        break;
    }
    case F32: {
        CCallHelpers::JumpList done;

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };

        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArguments<decltype(convertToF32)>(GPRInfo::returnValueGPR);
        auto call = jit.call(NoPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF32);
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };

        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArguments<decltype(convertToF64)>(GPRInfo::returnValueGPR);
        auto call = jit.call(NoPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, convertToF64);
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call(NoPtrTag);
        jit.jumpToExceptionHandler(*vm);

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, doUnwinding);
        });
    }

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
    if (UNLIKELY(patchBuffer.didFailToAllocate()))
        return makeUnexpected(BindingFailure::OutOfMemory);

    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code()));
    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_CODE(patchBuffer, NoPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
}

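// Called from wasm code that needs to throw (stack overflow or a trap). Records the top call
// frame, throws the appropriate JS error, unwinds, and returns the machine PC of the handler
// the caller should jump to.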
void* wasmToJSException(ExecState* exec, Wasm::ExceptionType type, Instance* wasmInstance)
{
    wasmInstance->storeTopCallFrame(exec);
    JSWebAssemblyInstance* instance = wasmInstance->owner<JSWebAssemblyInstance>();
    JSGlobalObject* globalObject = instance->globalObject();
    VM& vm = globalObject->vm();

    {
        auto throwScope = DECLARE_THROW_SCOPE(vm);

        JSObject* error;
        if (type == ExceptionType::StackOverflow)
            error = createStackOverflowError(exec, globalObject);
        else
            error = JSWebAssemblyRuntimeError::create(exec, vm, globalObject->WebAssemblyRuntimeErrorStructure(), Wasm::errorMessageForExceptionType(type));
        throwException(exec, throwScope, error);
    }

    genericUnwind(&vm, exec);
    ASSERT(!!vm.callFrameForCatch);
    ASSERT(!!vm.targetMachinePCForThrow);
    // FIXME: We could make this better:
    // This is a total hack, but the llint (both op_catch and handleUncaughtException)
    // require a cell in the callee field to load the VM. (The baseline JIT does not require
    // this since it is compiled with a constant VM pointer.) We could make the calling convention
    // for exceptions first load callFrameForCatch into the call frame register before jumping
    // to the exception handler. If we did this, we could remove this terrible hack.
    // https://bugs.webkit.org/show_bug.cgi?id=170440
    bitwise_cast<uint64_t*>(exec)[CallFrameSlot::callee] = bitwise_cast<uint64_t>(instance->webAssemblyToJSCallee());
    return vm.targetMachinePCForThrow;
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)